| code | docstring |
|---|---|
def _left_click(self, event):
self.update_active()
iid = self.current_iid
if iid is None:
return
args = (iid, event.x_root, event.y_root)
self.call_callbacks(iid, "left_callback", args) | Function bound to left click event for marker canvas |
def threshold(self, vmin=None, vmax=None, replaceWith=None):
    """Binary or continuous volume thresholding.

    Parameters:
        vmin, vmax: optional bounds. Both given -> threshold between
            them; only vmin -> ThresholdByUpper; only vmax ->
            ThresholdByLower.
        replaceWith: value written to out-of-threshold voxels; note
            that ``0`` is a valid replacement value.

    Returns self for call chaining; replaces ``self.image`` in place
    and refreshes the mapper.
    """
    th = vtk.vtkImageThreshold()
    th.SetInputData(self.image)
    if vmin is not None and vmax is not None:
        th.ThresholdBetween(vmin, vmax)
    elif vmin is not None:
        th.ThresholdByUpper(vmin)
    elif vmax is not None:
        th.ThresholdByLower(vmax)
    # BUG FIX: compare against None so that replaceWith=0 (a common
    # fill value) is honoured instead of being silently ignored.
    if replaceWith is not None:
        th.ReplaceOutOn()
        th.SetOutValue(replaceWith)
    th.Update()
    self.image = th.GetOutput()
    self.mapper.SetInputData(self.image)
    self.mapper.Modified()
    return self
def _detect(self):
results = []
for c in self.slither.contracts_derived:
unusedVars = self.detect_unused(c)
if unusedVars:
info = ''
for var in unusedVars:
info += "{}.{} ({}) is never used in {}\n".format(var.contract.name,
var.name,
var.source_mapping_str,
c.name)
json = self.generate_json_result(info)
self.add_variables_to_json(unusedVars, json)
results.append(json)
return results | Detect unused state variables |
def update_settings(self, service_id, version_number, settings=None):
    """Update the settings for a particular service and version.

    ``settings`` maps setting name -> value; ``None`` (the default)
    sends an empty update.  BUG FIX: the mutable ``{}`` default was
    replaced by ``None`` so calls can never share a dict accidentally.
    """
    body = urllib.urlencode(settings or {})
    content = self._fetch(
        "/service/%s/version/%d/settings" % (service_id, version_number),
        method="PUT", body=body)
    return FastlySettings(self, content)
def post_delete_helper(form_tag=True):
    """Build the crispy-forms layout helper for a message's delete form.

    Parameters:
        form_tag: whether the helper renders its own <form> tag.

    Returns a configured ``FormHelper``.
    """
    helper = FormHelper()
    helper.form_action = '.'
    # 'data_abide' enables Foundation's Abide client-side validation.
    helper.attrs = {'data_abide': ''}
    helper.form_tag = form_tag
    helper.layout = Layout(
        ButtonHolderPanel(
            Row(
                Column(
                    'confirm',
                    css_class='small-12 medium-8'
                ),
                Column(
                    Submit('submit', _('Submit')),
                    css_class='small-12 medium-4 text-right'
                ),
            ),
        ),
    )
    return helper
def create_port_channel(self, nexus_host, vpc_nbr):
    """Create port channel (vPC) number *vpc_nbr* on a Nexus switch.

    Sends the create request, applies any user-supplied port-channel
    configuration, and records the elapsed time for diagnostics.
    """
    starttime = time.time()
    vpc_str = str(vpc_nbr)
    path_snip = snipp.PATH_ALL
    # Template expects the vPC number three times -- see BODY_ADD_PORT_CH.
    body_snip = snipp.BODY_ADD_PORT_CH % (vpc_str, vpc_str, vpc_str)
    self.send_edit_string(nexus_host, path_snip, body_snip)
    self._apply_user_port_channel_config(nexus_host, vpc_nbr)
    self.capture_and_print_timeshot(
        starttime, "create_port_channel",
        switch=nexus_host)
def _get_global_color_table(colors):
    """Return a GIF global color table sorted in descending count order.

    ``colors`` is a Counter-like mapping whose keys are 3-byte RGB
    entries.  The table is zero-padded to the power-of-two size the
    GIF format requires.
    """
    global_color_table = b''.join(c[0] for c in colors.most_common())
    # get_color_table_size apparently returns the size bit-field as a
    # binary string -- assumed; TODO confirm against the encoder.
    full_table_size = 2**(1+int(get_color_table_size(len(colors)), 2))
    repeats = 3 * (full_table_size - len(colors))
    zeros = struct.pack('<{}x'.format(repeats))
    return global_color_table + zeros
def base36(value):
    """Encode a non-negative int to base 36.

    BUG FIX: value == 0 now encodes to the alphabet's zero digit
    instead of returning an empty string.
    """
    if value == 0:
        return BASE36_ALPHABET[0]
    result = ''
    while value:
        value, i = divmod(value, 36)
        result = BASE36_ALPHABET[i] + result
    return result
def _run_parallel_multiprocess(self):
    """Run queued work with one ``ProcRunner`` process per core.

    Spawns ``self._ncores`` processes, joins them all, and marks each
    worker success/fail in ``self._status`` by its exit code.
    """
    _log.debug("run.parallel.multiprocess.start")
    processes = []
    # NOTE(review): class-level singleton -- assumes a single runner
    # is active at a time.
    ProcRunner.instance = self
    for i in range(self._ncores):
        self._status.running(i)
        proc = multiprocessing.Process(target=ProcRunner.run, args=(i,))
        proc.start()
        processes.append(proc)
    for i in range(self._ncores):
        processes[i].join()
        code = processes[i].exitcode
        self._status.success(i) if 0 == code else self._status.fail(i)
    _log.debug("run.parallel.multiprocess.end states={}".format(self._status))
def _assemble_gap(stmt):
    """Assemble a Gap statement into an English sentence."""
    subj_str = _assemble_agent_str(stmt.gap)
    obj_str = _assemble_agent_str(stmt.ras)
    stmt_str = subj_str + ' is a GAP for ' + obj_str
    return _make_sentence(stmt_str)
def _get_rnn_layer(mode, num_layers, input_size, hidden_size, dropout, weight_dropout):
    """Create an RNN layer given the specs.

    ``mode`` is one of 'rnn_relu', 'rnn_tanh', 'lstm', 'gru'.

    Raises:
        ValueError: for an unknown mode.  (Previously an unknown mode
        fell through and crashed with an opaque NameError on
        ``rnn_block``.)
    """
    if mode == 'rnn_relu':
        rnn_block = functools.partial(rnn.RNN, activation='relu')
    elif mode == 'rnn_tanh':
        rnn_block = functools.partial(rnn.RNN, activation='tanh')
    elif mode == 'lstm':
        rnn_block = rnn.LSTM
    elif mode == 'gru':
        rnn_block = rnn.GRU
    else:
        raise ValueError('unknown RNN mode: %r' % (mode,))
    block = rnn_block(hidden_size, num_layers, dropout=dropout,
                      input_size=input_size)
    if weight_dropout:
        apply_weight_drop(block, '.*h2h_weight', rate=weight_dropout)
    return block
def search(self, **kw):
    """Find the active users matching the conditions in *kw*.

    Each keyword becomes an equality condition on the query; a
    'status' == 'active' condition is always applied.
    """
    q = db.select(self.table).condition('status', 'active')
    # BUG FIX: iterating a dict yields keys only; unpacking (k, v)
    # requires .items().
    for field, value in kw.items():
        q.condition(field, value)
    return [self.load(row, self.model) for row in q.execute()]
def create_blazar_client(config, session):
    """Create a blazar (reservation service) client for *session*.

    ``config`` is currently unused; the region is taken from the
    OS_REGION_NAME environment variable.
    """
    return blazar_client.Client(session=session,
                                service_type="reservation",
                                region_name=os.environ["OS_REGION_NAME"])
def init_repo(path):
    """Clone the gh-pages repo to *path* and check out its branch.

    Relies on module-level ``sh``/``cd`` helpers and ``pages_repo``;
    restores the original working directory afterwards.
    """
    sh("git clone %s %s"%(pages_repo, path))
    here = os.getcwd()
    cd(path)
    sh('git checkout gh-pages')
    cd(here)
def parse(self, argv):
    """Parse the given argument vector.

    Positionals are appended to ``self.result['args']``; every
    non-None option value is stored under its dest name in
    ``self.result['kwargs']``.  Returns self for chaining.
    """
    parsed, positionals = self.parse_args(argv)
    self.result['args'] += positionals
    updates = {dest: getattr(parsed, dest) for dest in self.dests
               if getattr(parsed, dest) is not None}
    self.result['kwargs'].update(updates)
    return self
def rm_gos(self, rm_goids):
    """Remove any edges that touch the user-specified GO IDs."""
    self.edges = self._rm_gos_edges(rm_goids, self.edges)
    self.edges_rel = self._rm_gos_edges_rel(rm_goids, self.edges_rel)
def merge(self, dct=None, **kwargs):
    """Recursively merge a dictionary and/or kwargs into this mapping.

    Nested ``Configuration`` values merge recursively (unless they opt
    out via a false ``__merge__`` attribute); list values are
    concatenated; everything else is overwritten.
    """
    if dct is None:
        dct = {}
    if kwargs:
        dct.update(**kwargs)
    for key, value in dct.items():
        # Recurse only when both sides look mergeable.
        if all((
            isinstance(value, dict),
            isinstance(self.get(key), Configuration),
            getattr(self.get(key), "__merge__", True),
        )):
            self[key].merge(value)
        elif isinstance(value, list) and isinstance(self.get(key), list):
            self[key] += value
        else:
            self[key] = value
def createConnection(self):
    """Return a CardConnection to this card's reader, or None.

    ``self.reader`` may be a Reader instance or a reader-name string;
    a string is resolved against the currently available readers.
    """
    readerobj = None
    if isinstance(self.reader, Reader):
        readerobj = self.reader
    elif type(self.reader) == str:
        # Match by the reader's string representation.
        for reader in readers():
            if self.reader == str(reader):
                readerobj = reader
    if readerobj:
        return readerobj.createConnection()
    else:
        return None
def add_content(obj, language, slot, content):
    """Add a TextPlugin with *content* to the named placeholder slot."""
    placeholder = obj.placeholders.get(slot=slot)
    add_plugin(placeholder, TextPlugin, language, body=content)
def parse(lang_sample):
    """Tally word popularity from a language-sample archive.

    Returns (unique_words, counts) where counts maps word -> frequency.
    """
    words = words_from_archive(lang_sample, include_dups=True)
    counts = zero_default_dict()
    for word in words:
        counts[word] += 1
    return set(words), counts
def str_to_python_object(input_str):
    """Convert a dotted-path string into the object it names.

    Empty input returns None; non-string input is passed through
    unchanged.  Bare names first resolve via
    ``known_mapping_str_to_type``, then as an import, then as an
    attribute of ``__main__``.

    Raises:
        CannotConvertError: when the name cannot be resolved.
    """
    if not input_str:
        return None
    if six.PY3 and isinstance(input_str, six.binary_type):
        input_str = to_str(input_str)
    if not isinstance(input_str, six.string_types):
        return input_str
    input_str = str_quote_stripper(input_str)
    # Fast path for short well-known names like "int".
    if '.' not in input_str and input_str in known_mapping_str_to_type:
        return known_mapping_str_to_type[input_str]
    parts = [x.strip() for x in input_str.split('.') if x.strip()]
    try:
        try:
            package = __import__(input_str)
        except ImportError:
            if len(parts) == 1:
                # A bare non-importable name: look it up in __main__.
                parts = ('__main__', input_str)
            package = __import__('.'.join(parts[:-1]), globals(), locals(), [])
        obj = package
        # Walk attributes down the dotted path.
        for name in parts[1:]:
            obj = getattr(obj, name)
        return obj
    except AttributeError as x:
        raise CannotConvertError("%s cannot be found" % input_str)
    except ImportError as x:
        raise CannotConvertError(str(x))
def install_template(username, repo):
    """Install a Blended template from the given GitHub user/repo."""
    print("Installing template from " + username + "/" + repo)
    dpath = os.path.join(cwd, "templates")
    getunzipped(username, repo, dpath)
def upgrade_all(self):
    """Upgrade every installed package to its latest version."""
    for pkg in self.installed_package_names:
        self.install(pkg, upgrade=True)
def fasta(self):
    """Generate sequence data for the protein in FASTA format.

    The header follows the PDB convention
    ``>PDBID:CHAIN|PDBID|CHAIN|SEQUENCE``; the sequence is wrapped at
    79 characters per line.
    """
    width = 79
    seq = self.sequence
    header = '>{0}:{1}|PDBID|CHAIN|SEQUENCE'.format(
        self.parent.id.upper(), self.id)
    body = [seq[pos:pos + width] for pos in range(0, len(seq), width)]
    return '\n'.join([header] + body) + '\n'
def __query_options(self):
    """Compute the wire-protocol option bit-flags for this query."""
    options = 0
    if self.__tailable:
        options |= _QUERY_OPTIONS["tailable_cursor"]
    # Either cursor-level or pool-level slave_okay enables the flag.
    if self.__slave_okay or self.__pool._slave_okay:
        options |= _QUERY_OPTIONS["slave_okay"]
    if not self.__timeout:
        options |= _QUERY_OPTIONS["no_timeout"]
    return options
def publish_alias(self, func_data, alias):
    """Create or update an alias for the given Lambda function.

    Returns the alias ARN, or the bare function ARN when no alias was
    requested.  A no-op (returning the existing ARN) when the alias
    already points at the current version.
    """
    if not alias:
        return func_data['FunctionArn']
    func_name = func_data['FunctionName']
    func_version = func_data['Version']
    exists = resource_exists(
        self.client.get_alias, FunctionName=func_name, Name=alias)
    if not exists:
        log.debug("Publishing custodian lambda alias %s", alias)
        alias_result = self.client.create_alias(
            FunctionName=func_name,
            Name=alias,
            FunctionVersion=func_version)
    else:
        # Already up to date -- avoid a needless update call.
        if (exists['FunctionVersion'] == func_version and
                exists['Name'] == alias):
            return exists['AliasArn']
        log.debug('Updating custodian lambda alias %s', alias)
        alias_result = self.client.update_alias(
            FunctionName=func_name,
            Name=alias,
            FunctionVersion=func_version)
    return alias_result['AliasArn']
def merge(self, cluster_ids=None, to=None):
    """Merge the given clusters (default: the current selection).

    No-op when fewer than two clusters are involved.  Records the
    action in the global history for undo support.
    """
    if cluster_ids is None:
        cluster_ids = self.selected
    if len(cluster_ids or []) <= 1:
        return
    self.clustering.merge(cluster_ids, to=to)
    self._global_history.action(self.clustering)
async def fetch_api_description(
        url: typing.Union[str, ParseResult, SplitResult],
        insecure: bool = False):
    """Fetch the API description JSON from a remote MAAS instance.

    ``insecure=True`` disables TLS certificate verification.

    Raises:
        RemoteError: on a non-200 status or a non-JSON content type.
    """
    url_describe = urljoin(_ensure_url_string(url), "describe/")
    # NOTE(review): verify_ssl is deprecated in newer aiohttp versions
    # -- confirm the pinned aiohttp still accepts it.
    connector = aiohttp.TCPConnector(verify_ssl=(not insecure))
    session = aiohttp.ClientSession(connector=connector)
    async with session, session.get(url_describe) as response:
        if response.status != HTTPStatus.OK:
            raise RemoteError(
                "{0} -> {1.status} {1.reason}".format(
                    url, response))
        elif response.content_type != "application/json":
            raise RemoteError(
                "Expected application/json, got: %s"
                % response.content_type)
        else:
            return await response.json()
def list_devices(self):
    """Log all devices found in the PLM's ALDB.

    X10 devices get a short line; Insteon devices include category,
    subcategory, description and model.  Also reminds the user to
    connect when no IM transport exists yet.
    """
    if self.plm.devices:
        for addr in self.plm.devices:
            device = self.plm.devices[addr]
            if device.address.is_x10:
                _LOGGING.info('Device: %s %s', device.address.human,
                              device.description)
            else:
                _LOGGING.info('Device: %s cat: 0x%02x subcat: 0x%02x '
                              'desc: %s, model: %s',
                              device.address.human, device.cat,
                              device.subcat, device.description,
                              device.model)
    else:
        _LOGGING.info('No devices found')
    if not self.plm.transport:
        _LOGGING.info('IM connection has not been made.')
        _LOGGING.info('Use `connect [device]` to open the connection')
def run(self):
    """Perform the action selected on the command line.

    Dispatches to the first truthy flag among add/rm/show/rename,
    falling back to running the given command.
    """
    for name in ('add', 'rm', 'show', 'rename'):
        if self.args[name]:
            getattr(self, 'action_' + name)()
            return
    self.action_run_command()
async def read(cls, node, id):
    """Get a `Bcache` by *id* on the given node.

    ``node`` may be a Node instance or a system_id string.

    Raises:
        TypeError: for any other node type.
    """
    if isinstance(node, str):
        system_id = node
    elif isinstance(node, Node):
        system_id = node.system_id
    else:
        raise TypeError(
            "node must be a Node or str, not %s"
            % type(node).__name__)
    return cls(await cls._handler.read(system_id=system_id, id=id))
def _assign_values_to_unbound_vars(unbound_vars, unbound_var_values):
    """Assign values to the vars, raising ValueError for bad/missing keys.

    Vars absent from ``unbound_var_values`` fall back to their declared
    defaults; vars with neither a value nor a default are reported
    together in a single ValueError.
    """
    context = {}
    for key, value in six.iteritems(unbound_var_values):
        if key not in unbound_vars:
            raise ValueError('unexpected key: %s. Legal values are: %s' %
                             (key, list(six.iterkeys(unbound_vars))))
        context[unbound_vars[key]] = value
    unspecified = []
    for unbound_var in six.itervalues(unbound_vars):
        if unbound_var not in context:
            if unbound_var.has_default():
                context[unbound_var] = unbound_var.default
            else:
                unspecified.append(unbound_var.key)
    if unspecified:
        raise ValueError('Unspecified keys: %s' % unspecified)
    return context
def _content_blocks(self, r):
    """Return the number of content (non-zero) blocks in block row *r*."""
    return (self._block_rows - self._left_zero_blocks(r)
            - self._right_zero_blocks(r))
def dispatch_error(self, err):
    """Build an HTTP error response body for *err*.

    Formats the exception's args list; if the formatter rejects that
    payload, falls back to the exception's string form.  The response
    status comes from ``err.status`` when present, else 500.
    """
    try:
        body = self._meta.formatter.format({'error': list(err.args)})
    except Exception:
        # Formatter choked on the args payload -- fall back to str(err).
        body = self._meta.formatter.format({'error': str(err)})
    status = getattr(err, 'status', 500)
    return self.build_http_response(body, status=status)
def aot_rpush(self, exit_code):
    """Push *exit_code* to the AOT exit channel (Redis backends only).

    Exits the app with status 1 if the push fails.
    """
    if self.tcex.default_args.tc_playbook_db_type == 'Redis':
        try:
            self.db.rpush(self.tcex.default_args.tc_exit_channel, exit_code)
        except Exception as e:
            self.tcex.exit(1, 'Exception during AOT exit push ({}).'.format(e))
def _get_resource_raw(
    self, cls, id, extra=None, headers=None, stream=False, **filters
):
    """GET an individual REST resource and return the validated response.

    ``extra`` appends a sub-path; ``filters`` become query parameters
    (datetime values converted to timestamps per ``cls``).
    """
    headers = headers or {}
    headers.update(self.session.headers)
    postfix = "/{}".format(extra) if extra else ""
    # Resources under the "a" api_root are namespaced by application id.
    if cls.api_root != "a":
        url = "{}/{}/{}{}".format(self.api_server, cls.collection_name, id, postfix)
    else:
        url = "{}/a/{}/{}/{}{}".format(
            self.api_server, self.app_id, cls.collection_name, id, postfix
        )
    converted_filters = convert_datetimes_to_timestamps(
        filters, cls.datetime_filter_attrs
    )
    url = str(URLObject(url).add_query_params(converted_filters.items()))
    response = self._get_http_session(cls.api_root).get(
        url, headers=headers, stream=stream
    )
    return _validate(response)
def render_or_send(func, message):
    """Render an email message for debugging, or actually send it.

    Sends whenever the current request endpoint differs from *func*'s
    own endpoint; authenticated superusers additionally get the
    rendered debug view.  NOTE(review): ``func.func_name`` and the
    callable ``is_authenticated()`` are Python-2 / old-Flask-Login
    idioms -- confirm the target runtime.
    """
    if request.endpoint != func.func_name:
        mail.send(message)
    if (current_user.is_authenticated() and current_user.superuser):
        return render_template('debug_email.html', message=message)
def wait_for_compute_zone_operation(compute, project_name, operation, zone):
    """Poll a GCE zone operation until it finishes.

    Returns the last polled result.  Raises Exception when the
    operation reports an error.  Note the loop gives up silently
    after MAX_POLLS without reaching DONE.
    """
    logger.info("wait_for_compute_zone_operation: "
                "Waiting for operation {} to finish...".format(
                    operation["name"]))
    for _ in range(MAX_POLLS):
        result = compute.zoneOperations().get(
            project=project_name, operation=operation["name"],
            zone=zone).execute()
        if "error" in result:
            raise Exception(result["error"])
        if result["status"] == "DONE":
            logger.info("wait_for_compute_zone_operation: "
                        "Operation {} finished.".format(operation["name"]))
            break
        time.sleep(POLL_INTERVAL)
    return result
def user_group_perms_processor(request):
    """Django context processor: expose org permissions and user org.

    Adds ``org_perms`` (when the user has an org group) and always
    adds ``user_org`` (possibly None).
    """
    org = None
    group = None
    if hasattr(request, "user"):
        if request.user.is_anonymous:
            group = None
        else:
            group = request.user.get_org_group()
            org = request.user.get_org()
    if group:
        context = dict(org_perms=GroupPermWrapper(group))
    else:
        context = dict()
    context["user_org"] = org
    return context
def primary_spin(mass1, mass2, spin1, spin2):
    """Return the dimensionless spin of the more massive component.

    Inputs may be scalars or arrays (normalized via ``ensurearray``);
    wherever mass1 < mass2 the secondary's spin is substituted.
    """
    mass1, mass2, spin1, spin2, input_is_array = ensurearray(
        mass1, mass2, spin1, spin2)
    sp = copy.copy(spin1)
    mask = mass1 < mass2
    sp[mask] = spin2[mask]
    return formatreturn(sp, input_is_array)
def screenshots_done(self, jobid):
    """Return True if the screenshots job *jobid* has finished.

    Fetches the job's JSON status document and checks its ``state``.
    (The ``True if ... else False`` wrapper was redundant; the
    comparison already yields a bool.)
    """
    resp = self.session.get(
        os.path.join(self.api_url, '{0}.json'.format(jobid)))
    resp = self._process_response(resp)
    return resp.json()['state'] == 'done'
def cursor_pos(self):
    """The cursor x-position in pixels, accounting for horizontal scroll."""
    if len(self) == 0:
        return self.left + self.default_text.get_width()
    papy = self._surface.get_width()
    # When the rendered text is wider than the widget, the text is
    # shifted left so the cursor stays visible.
    if papy > self.w:
        shift = papy - self.width
    else:
        shift = 0
    return self.left + self.font.size(self.shawn_text[:self.cursor])[0] - shift
def group2commlst(commlst, glist):
    """Prepend group and idf-object info to each comm item.

    For each (group, objname) pair in *glist*, two header strings are
    inserted at the front of the matching comm item's first list.
    Returns *commlst* (mutated in place).
    """
    for (group_name, obj_name), item in zip(glist, commlst):
        # Slice assignment inserts both headers in one step.
        item[0][0:0] = ["group %s" % (group_name,),
                        "idfobj %s" % (obj_name,)]
    return commlst
def standard_settings(self):
    """Apply standard PyMOL settings for a nice visualization."""
    cmd.set('bg_rgb', [1.0, 1.0, 1.0])  # white background
    cmd.set('depth_cue', 0)
    cmd.set('cartoon_side_chain_helper', 1)
    cmd.set('cartoon_fancy_helices', 1)
    cmd.set('transparency_mode', 1)
    cmd.set('dash_radius', 0.05)
    self.set_custom_colorset()
def temp_filename(self):
    """Return the name of a fresh, unique temporary file.

    The file is created (empty) and its descriptor closed; the caller
    owns the file and is responsible for removing it.
    """
    fd, name = tempfile.mkstemp()
    os.close(fd)
    return name
def owner(self, data):
    """Set the Owner payload value for this resource request.

    A None value is rejected with a warning instead of being sent.
    """
    if data is not None:
        self._request.add_payload('owner', data)
    else:
        self.tcex.log.warn(u'Provided owner was invalid. ({})'.format(data))
def load_settings_sizes():
    """Load page/query sizes from Django settings, else module defaults.

    Returns a 5-tuple:
    (page_size, page_size_min, page_size_max, query_size, query_size_min).
    Each value comes from the same-named settings attribute when set,
    otherwise from the module-level constant.
    """
    page_size = AGNOCOMPLETE_DEFAULT_PAGESIZE
    settings_page_size = getattr(
        settings, 'AGNOCOMPLETE_DEFAULT_PAGESIZE', None)
    page_size = settings_page_size or page_size
    page_size_min = AGNOCOMPLETE_MIN_PAGESIZE
    settings_page_size_min = getattr(
        settings, 'AGNOCOMPLETE_MIN_PAGESIZE', None)
    page_size_min = settings_page_size_min or page_size_min
    page_size_max = AGNOCOMPLETE_MAX_PAGESIZE
    settings_page_size_max = getattr(
        settings, 'AGNOCOMPLETE_MAX_PAGESIZE', None)
    page_size_max = settings_page_size_max or page_size_max
    query_size = AGNOCOMPLETE_DEFAULT_QUERYSIZE
    settings_query_size = getattr(
        settings, 'AGNOCOMPLETE_DEFAULT_QUERYSIZE', None)
    query_size = settings_query_size or query_size
    query_size_min = AGNOCOMPLETE_MIN_QUERYSIZE
    settings_query_size_min = getattr(
        settings, 'AGNOCOMPLETE_MIN_QUERYSIZE', None)
    query_size_min = settings_query_size_min or query_size_min
    return (
        page_size, page_size_min, page_size_max,
        query_size, query_size_min,
    )
def names_labels(self, do_print=False):
    """Return (field_names, field_labels); optionally print the mapping."""
    names, labels = self.field_names, self.field_labels
    if do_print:
        for name, label in zip(names, labels):
            print('{0} --> {1}'.format(name, label))
    return names, labels
def filename(self):
    """Return a readable filename for a transcript.

    Built as ``<client-id>-<language>.<format>`` with any newlines
    replaced by spaces.
    """
    base = os.path.splitext(self.video.client_video_id)[0]
    raw = u'{0}-{1}.{2}'.format(base, self.language_code, self.file_format)
    return raw.replace('\n', ' ')
def stop_job(self, job_id, array_id = None):
    """Reset the given job (and array jobs) to 'submitted' if live.

    Live means status in ('executing', 'queued', 'waiting').  With no
    ``array_id`` every array member of the job is reset too.  Runs
    under the instance lock and commits the session.
    """
    self.lock()
    job, array_job = self._job_and_array(job_id, array_id)
    if job is not None:
        if job.status in ('executing', 'queued', 'waiting'):
            logger.info("Reset job '%s' (%s) in the database", job.name, self._format_log(job.id))
            job.status = 'submitted'
        if array_job is not None and array_job.status in ('executing', 'queued', 'waiting'):
            logger.debug("Reset array job '%s' in the database", array_job)
            array_job.status = 'submitted'
        if array_job is None:
            # No specific array id: reset every live member of the array.
            for array_job in job.array:
                if array_job.status in ('executing', 'queued', 'waiting'):
                    logger.debug("Reset array job '%s' in the database", array_job)
                    array_job.status = 'submitted'
    self.session.commit()
    self.unlock()
async def _receive_packet(self, pkt):
    """Handle one incoming engine.io packet from the server."""
    packet_name = packet.packet_names[pkt.packet_type] \
        if pkt.packet_type < len(packet.packet_names) else 'UNKNOWN'
    self.logger.info(
        'Received packet %s data %s', packet_name,
        pkt.data if not isinstance(pkt.data, bytes) else '<binary>')
    if pkt.packet_type == packet.MESSAGE:
        await self._trigger_event('message', pkt.data, run_async=True)
    elif pkt.packet_type == packet.PONG:
        self.pong_received = True
    elif pkt.packet_type == packet.NOOP:
        # NOOP packets are deliberately ignored.
        pass
    else:
        self.logger.error('Received unexpected packet of type %s',
                          pkt.packet_type)
def _show_final_overflow_message(self, row_overflow, col_overflow):
    """Display a statusbar message after an import truncated the grid.

    Raises AssertionError when called with neither overflow flag set.
    """
    if row_overflow and col_overflow:
        overflow_cause = _("rows and columns")
    elif row_overflow:
        overflow_cause = _("rows")
    elif col_overflow:
        overflow_cause = _("columns")
    else:
        raise AssertionError(_("Import cell overflow missing"))
    statustext = \
        _("The imported data did not fit into the grid {cause}. "
          "It has been truncated. Use a larger grid for full import.").\
        format(cause=overflow_cause)
    post_command_event(self.main_window, self.StatusBarMsg,
                       text=statustext)
def duplicate_object_hook(ordered_pairs):
    """JSON object_pairs_hook that collects duplicate keys into lists.

    BUG FIX: key presence is now tested with ``in`` rather than value
    truthiness, so a falsy first value (0, '', None, ...) is no longer
    silently overwritten by a duplicate key.

    Caveat (preserved from the original): if a key's first value is
    itself a list, later duplicates are appended to it rather than
    nested.
    """
    json_dict = {}
    for key, val in ordered_pairs:
        if key not in json_dict:
            json_dict[key] = val
        else:
            existing = json_dict[key]
            if isinstance(existing, list):
                existing.append(val)
            else:
                json_dict[key] = [existing, val]
    return json_dict
def release(ctx, deploy=False, test=False, version=''):
    """Tag a release and deploy to PyPI (invoke task).

    ``test`` performs a dry-run upload; ``deploy`` with ``version``
    tags, pushes, builds and uploads; ``deploy`` without ``version``
    prints a pre-release checklist instead.
    """
    if test:
        run("python setup.py check")
        run("python setup.py register sdist upload --dry-run")
    if deploy:
        run("python setup.py check")
        if version:
            run("git checkout master")
            run("git tag -a v{ver} -m 'v{ver}'".format(ver=version))
            run("git push")
            run("git push origin --tags")
            run("python setup.py sdist bdist_wheel")
            run("twine upload --skip-existing dist/*")
    else:
        print("- Have you updated the version?")
        print("- Have you updated CHANGELOG.md, README.md, and AUTHORS.md?")
        print("- Have you fixed any last minute bugs?")
        print("- Have you merged changes for release into the master branch?")
        print("If you answered yes to all of the above questions,")
        print("then run `inv release --deploy -vX.YY.ZZ` to:")
        print("- Checkout master")
        print("- Tag the git release with provided vX.YY.ZZ version")
        print("- Push the master branch and tags to repo")
def init_defaults(self):
    """Initialize defaults; alias the table as an inner-join query."""
    super(QueryTable, self).init_defaults()
    self.query = self.table
    self.query.is_inner = True
def _javascript_helper(self, position):
    """Return <script> tags for the current page and its plugins.

    *position* is "header" or "footer"; any other value is coerced to
    "footer".  Plugin hook entries come first, then page entries.
    """
    if position not in ["header", "footer"]:
        position = "footer"
    if position == "header":
        entries = [entry for entry in self._plugin_manager.call_hook("javascript_header") if entry is not None]
    else:
        entries = [entry for entry in self._plugin_manager.call_hook("javascript_footer") if entry is not None]
    entries += self._get_ctx()["javascript"][position]
    entries = ["<script src='" + entry + "' type='text/javascript' charset='utf-8'></script>" for entry in entries]
    return "\n".join(entries)
def result(self):
    """Stop the worker threads and return the queue of task results.

    Sends one poison pill (None) per thread, waits for all queued
    tasks to complete, and re-raises the first recorded worker
    exception if any occurred.
    """
    for _ in range(self.num_threads):
        self.tasks_queue.put(None)
    self.tasks_queue.join()
    if not self.exceptions_queue.empty():
        raise self.exceptions_queue.get()
    return self.results_queue
def worker_process(params, channel):
    """The worker process main routine.

    Ignores SIGINT (the parent handles it), runs the optional
    initializer, then executes tasks from *channel* until exhaustion.
    Exits with the error's errno on OS-level failures, 0 on EOF.
    """
    signal(SIGINT, SIG_IGN)
    if params.initializer is not None:
        if not run_initializer(params.initializer, params.initargs):
            os._exit(1)
    try:
        for task in worker_get_next_task(channel, params.max_tasks):
            payload = task.payload
            result = process_execute(
                payload.function, *payload.args, **payload.kwargs)
            send_result(channel, Result(task.id, result))
    except (EnvironmentError, OSError, RuntimeError) as error:
        os._exit(error.errno if error.errno else 1)
    except EOFError:
        # Parent closed the channel: clean shutdown.
        os._exit(0)
def _build_signature(self):
    """Create the payload signature using the author's private key.

    Returns (sig, key_id): the urlsafe-base64 RSA-SHA256 signature
    over the dot-joined payload/encoding/alg fields, and the
    base64-encoded author handle.
    """
    sig_contents = \
        self.payload + "." + \
        b64encode(b"application/xml").decode("ascii") + "." + \
        b64encode(b"base64url").decode("ascii") + "." + \
        b64encode(b"RSA-SHA256").decode("ascii")
    sig_hash = SHA256.new(sig_contents.encode("ascii"))
    cipher = PKCS1_v1_5.new(self.private_key)
    sig = urlsafe_b64encode(cipher.sign(sig_hash))
    key_id = urlsafe_b64encode(bytes(self.author_handle, encoding="utf-8"))
    return sig, key_id
def cycle(self, *values):
    """Cycle through *values* as the loop progresses.

    Raises ValueError when no values are supplied.
    """
    if not values:
        raise ValueError("You must provide values to cycle through")
    pos = self.index % len(values)
    return values[pos]
def _processJobsWithRunningServices(self):
    """Move jobs whose services have all started into the updated set."""
    while True:
        jobGraph = self.serviceManager.getJobGraphWhoseServicesAreRunning(0)
        if jobGraph is None:
            # No more jobs with established services right now.
            break
        logger.debug('Job: %s has established its services.', jobGraph.jobStoreID)
        jobGraph.services = []
        self.toilState.updatedJobs.add((jobGraph, 0))
def calc_parent(self, i, j, h):
    """Return (i, j, h, p) of the parent span containing child span (i, j, h).

    ``N`` is the repo's array size; a parent at height h+1 spans N
    child spans.  ``p`` is the child's position within the parent.

    BUG FIX: the assertion messages used a single ``{}`` placeholder
    with two format arguments, silently dropping one value; both are
    now reported.
    """
    N = self.repo.array_size
    child_n = i // (N ** h)
    parent_n = child_n // N
    parent_pos = child_n % N
    parent_h = h + 1
    span = N ** parent_h
    parent_i = parent_n * span
    parent_j = parent_i + span
    assert parent_i <= i, \
        'i greater on parent than child: {} > {}'.format(parent_i, i)
    assert parent_j >= j, \
        'j less on parent than child: {} < {}'.format(parent_j, j)
    return parent_i, parent_j, parent_h, parent_pos
def competence(s):
    """Competence function for MatrixMetropolis.

    Returns 2 for Wishart/WishartCov stochastics, 0 otherwise.
    """
    if any([isinstance(s, cls)
            for cls in [distributions.Wishart, distributions.WishartCov]]):
        return 2
    else:
        return 0
def _name_is_available(self, job_name):
return (False
if [job for job in self.jobs if job.name == job_name]
else True) | Returns Boolean of whether the specified name is already in use. |
def section(self, resources):
    """Return the first 'section' ancestor of this resource, or None."""
    for parent in self.parents(resources):
        if parent.rtype == 'section':
            return parent
    return None
def RenderPayload(self, result, value):
    """Rename args fields of a rendered GrrMessage to payload fields.

    'args_rdf_name' becomes 'payload_type' and 'args' becomes a
    rendered 'payload'.  Mutates and returns *result*.
    """
    if "args_rdf_name" in result:
        result["payload_type"] = result["args_rdf_name"]
        del result["args_rdf_name"]
    if "args" in result:
        result["payload"] = self._PassThrough(value.payload)
        del result["args"]
    return result
def _from_dict(cls, _dict):
    """Initialize a TrainingQuery object from a json dictionary.

    Only keys present in *_dict* are passed to the constructor;
    'examples' entries are recursively deserialized.
    """
    args = {}
    if 'query_id' in _dict:
        args['query_id'] = _dict.get('query_id')
    if 'natural_language_query' in _dict:
        args['natural_language_query'] = _dict.get('natural_language_query')
    if 'filter' in _dict:
        args['filter'] = _dict.get('filter')
    if 'examples' in _dict:
        args['examples'] = [
            TrainingExample._from_dict(x) for x in (_dict.get('examples'))
        ]
    return cls(**args)
def _parse_dict_string(self, string, key, default):
try:
for item in string.split(','):
k, v = item.rsplit('=', 1)
if k == key:
try:
return int(v)
except ValueError:
return v
return default
except Exception:
self.log.exception("Cannot parse dictionary string: %s" % string)
return default | Take from a more recent redis.py, parse_info |
def selecteq(table, field, value, complement=False):
    """Select rows where *field* equals *value* (complement inverts)."""
    return selectop(table, field, value, operator.eq, complement=complement)
def path_components(path):
    """Convert a path into its group and channel name components.

    Paths look like ``/'group'/'channel'`` -- each component is
    single-quoted, and a literal quote inside a component is escaped
    by doubling (``''``).  Raises ValueError on malformed paths.
    """
    def yield_components(path):
        # Walk the string as (current, next) character pairs so escape
        # sequences can be recognized with one lookahead.
        chars = zip_longest(path, path[1:])
        try:
            while True:
                c, n = next(chars)
                if c != '/':
                    raise ValueError("Invalid path, expected \"/\"")
                elif (n is not None and n != "'"):
                    raise ValueError("Invalid path, expected \"'\"")
                else:
                    # Skip the opening quote.
                    next(chars)
                component = []
                while True:
                    c, n = next(chars)
                    if c == "'" and n == "'":
                        # Doubled quote: literal quote character.
                        component += "'"
                        next(chars)
                    elif c == "'":
                        # Closing quote ends the component.
                        yield "".join(component)
                        break
                    else:
                        component += c
        except StopIteration:
            return
    return list(yield_components(path))
def pretty_const(value):
    """Make a SHOUTY_CONSTANT pretty for display in a GUI.

    The first word is capitalized, remaining words are lowercased,
    and underscores become spaces.
    """
    first, *rest = value.split('_')
    return ' '.join([first.capitalize()] + [word.lower() for word in rest])
def camel_to_snake_case(name):
    """Convert a camelCased (or PascalCased) string to snake_case.

    BUG FIX: the original pattern had no alternative for lowercase
    runs, so a leading lowercase word was dropped ('camelCase'
    became 'case').  The added ``[a-z]+`` alternative fixes that
    while preserving acronym handling ('HTTPResponse' ->
    'http_response').
    """
    pattern = r'[A-Z][a-z]+|[A-Z]+(?![a-z])|[a-z]+'
    return '_'.join(part.lower() for part in re.findall(pattern, name))
def epiweeks_in_year(year: int) -> int:
    """Return the number of epiweeks (52 or 53) in *year*.

    A year has 53 epiweeks iff week 53 round-trips through the
    date conversion back into the same year.
    """
    if date_to_epiweek(epiweek_to_date(Epiweek(year, 53))).year == year:
        return 53
    else:
        return 52
def _slice(self, start, end):
    """Return bits [start, end) as a new bitstring (no error checking).

    Internal helper: works directly on the byte store, carrying the
    bit offset so no byte-alignment copy is needed.
    """
    if end == start:
        return self.__class__()
    offset = self._offset
    # Locate the byte range covering the requested bit span.
    startbyte, newoffset = divmod(start + offset, 8)
    endbyte = (end + offset - 1) // 8
    bs = self.__class__()
    bs._setbytes_unsafe(self._datastore.getbyteslice(startbyte, endbyte + 1), end - start, newoffset)
    return bs
def FromString(self, string):
    """Parse a bool from a string (case-insensitive).

    Accepts true/yes/y and false/no/n; raises TypeValueError for
    anything else.
    """
    lowered = string.lower()
    if lowered in ("false", "no", "n"):
        return False
    if lowered in ("true", "yes", "y"):
        return True
    raise TypeValueError("%s is not recognized as a boolean value." % string)
def dataframe(self):
    """Build and cache a pandas DataFrame from the query results.

    Raises:
        RuntimeError: if pandas is not installed.
    """
    if self._dataframe is None:
        try:
            import pandas as pd
        except ImportError:
            raise RuntimeError('To enable dataframe support, '
                               'run \'pip install datadotworld[pandas]\'')
        self._dataframe = pd.DataFrame.from_records(self._iter_rows(),
                                                    coerce_float=True)
    return self._dataframe
def _get_pause(jid, state_id=None):
    """Return (data, pause_path) with the pause information for *jid*.

    BUG FIX: the ``state_id`` placeholder used to be inserted into
    ``data`` *before* the cache file was read, so it was discarded
    whenever the file existed; the placeholder is now added after
    loading (and only when the key is absent).
    """
    pause_dir = os.path.join(__opts__['cachedir'], 'state_pause')
    pause_path = os.path.join(pause_dir, jid)
    if not os.path.exists(pause_dir):
        try:
            os.makedirs(pause_dir)
        except OSError:
            # Lost a creation race with another process; the directory
            # exists now, which is all we need.
            pass
    data = {}
    if os.path.exists(pause_path):
        with salt.utils.files.fopen(pause_path, 'rb') as fp_:
            data = salt.utils.msgpack.loads(fp_.read())
    if state_id is not None and state_id not in data:
        data[state_id] = {}
    return data, pause_path
def get(self):
    """Read the remote file from Gist and save it locally.

    No-op when no gist is configured.
    """
    if self.gist:
        content = self.github.read_gist_file(self.gist)
        self.local.save(content)
def from_update(cls, update):
    """Factory: build an Update expression from a parsed UPDATE clause.

    Collects SET/REMOVE/ADD/DELETE sub-expressions in that order.
    """
    expressions = []
    if update.set_expr:
        expressions.append(UpdateSetMany.from_clause(update.set_expr))
    if update.remove_expr:
        expressions.append(UpdateRemove.from_clause(update.remove_expr))
    if update.add_expr:
        expressions.append(UpdateAdd.from_clause(update.add_expr))
    if update.delete_expr:
        expressions.append(UpdateDelete.from_clause(update.delete_expr))
    return cls(expressions)
def autosize_fieldname(idfobject):
    """Return the autosizeable field names of *idfobject*."""
    return [fname for (fname, dct) in zip(idfobject.objls,
                                          idfobject['objidd'])
            if 'autosizable' in dct]
def _default_client(jws_client, reactor, key, alg):
    """Return *jws_client*, constructing a pooled JWSClient if None."""
    if jws_client is None:
        pool = HTTPConnectionPool(reactor)
        agent = Agent(reactor, pool=pool)
        jws_client = JWSClient(HTTPClient(agent=agent), key, alg)
    return jws_client
def import_data_to_restful_server(args, content):
    """POST imported data to the NNI REST server for the experiment.

    Returns the response on success, None if the server is down or
    the response failed validation.
    """
    nni_config = Config(get_config_filename(args))
    rest_port = nni_config.get_config('restServerPort')
    running, _ = check_rest_server_quick(rest_port)
    if running:
        response = rest_post(import_data_url(rest_port), content, REST_TIME_OUT)
        if response and check_response(response):
            return response
    else:
        print_error('Restful server is not running...')
    return None
def refresh_token(self, headers=None, **kwargs):
    """Request a refreshed OAuth token from the provider.

    Extra kwargs (e.g. refresh_token, grant_type) are merged into the
    request payload alongside the client credentials.
    """
    self._check_configuration("site", "token_url", "client_id",
                              "client_secret")
    url = "%s%s" % (self.site, quote(self.token_url))
    data = {
        'client_id': self.client_id,
        'client_secret': self.client_secret,
    }
    data.update(kwargs)
    return self._make_request(url, data=data, headers=headers)
def _receiverThread(session):
    """Receive messages from the cjdns admin server socket.

    Sends periodic keepalives, raises PingTimeout when no data has
    arrived for 10s past the keepalive window, and SessionLost when
    async mode drops.  Non-keepalive messages are queued for the
    session's consumer.  NOTE(review): ``import thread`` is
    Python-2-only.
    """
    timeOfLastSend = time.time()
    timeOfLastRecv = time.time()
    try:
        while True:
            if timeOfLastSend + KEEPALIVE_INTERVAL_SECONDS < time.time():
                if timeOfLastRecv + 10 < time.time():
                    raise exceptions.PingTimeout()
                # Bencoded keepalive ping.
                session.socket.send(
                    b'd1:q18:Admin_asyncEnabled4:txid8:keepalive')
                timeOfLastSend = time.time()
            try:
                data = session.socket.recv(BUFFER_SIZE)
            except socket.timeout:
                continue
            try:
                benc = bdecode(data)
            except (KeyError, ValueError):
                logger.error("error decoding [%s]", data)
                continue
            if benc['txid'] == 'keepaliv':
                if benc['asyncEnabled'] == 0:
                    raise exceptions.SessionLost()
                timeOfLastRecv = time.time()
            else:
                session.queue.put(benc)
    except KeyboardInterrupt:
        logger.exception("interrupted")
        import thread
        thread.interrupt_main()
def createDocument(self, namespaceURI, localName, doctype=None):
    """Create a document; must be a SOAP envelope or an empty document.

    With both namespaceURI and localName None an empty document is
    created; otherwise namespaceURI must be the reserved SOAP-ENV
    namespace.  NOTE(review): this is Python-2-only source (comma
    raise syntax).
    """
    prefix = self._soap_env_prefix
    if namespaceURI == self.reserved_ns[prefix]:
        qualifiedName = '%s:%s' %(prefix,localName)
    elif namespaceURI is localName is None:
        self.node = self._dom.createDocument(None,None,None)
        return
    else:
        raise KeyError, 'only support creation of document in %s' %self.reserved_ns[prefix]
    document = self._dom.createDocument(nsuri=namespaceURI, qname=qualifiedName, doctype=doctype)
    self.node = document.childNodes[0]
    # Declare every reserved namespace prefix on the envelope element.
    for prefix,nsuri in self.reserved_ns.items():
        self._setAttributeNS(namespaceURI=self._xmlns_nsuri,
                             qualifiedName='%s:%s' %(self._xmlns_prefix,prefix),
                             value=nsuri)
def subset_bed_by_chrom(in_file, chrom, data, out_dir=None):
    """Subset a BED file to only have items from the specified chromosome."""
    target_dir = os.path.dirname(in_file) if out_dir is None else out_dir
    stem, ext = os.path.splitext(os.path.basename(in_file))
    out_file = os.path.join(target_dir, "%s-%s%s" % (stem, chrom, ext))
    # Only rewrite when the subset is missing or stale.
    if not utils.file_uptodate(out_file, in_file):
        with file_transaction(data, out_file) as tx_out_file:
            _rewrite_bed_with_chrom(in_file, tx_out_file, chrom)
    return out_file
def _set_name(self, name):
self._service_name = name
self.__send_to_frontend({"band": "set_name", "name": self._service_name}) | Set a new name for this service, and notify the frontend accordingly. |
def stage_hub(hub, staging=None):
    """Stage a hub by symlinking all its connected files to a local directory.

    Returns the staging directory (freshly created when not given) and the
    accumulated list of link names.
    """
    if staging is None:
        staging = tempfile.mkdtemp()
    linknames = []
    for component, _level in hub.leaves(base.HubComponent, intermediate=True):
        linknames += stage(component, staging)
    return staging, linknames
def prepend_line(filepath, line):
    """Rewrite a file, adding *line* to its beginning.

    *line* should include its own trailing newline if a separate first
    line is desired.
    """
    with open(filepath) as src:
        original = src.read()
    with open(filepath, 'w') as dst:
        dst.write(line + original)
def build_spotify_api():
    """Build the Spotify API client for future use.

    Reads the Spotify client id/secret from data.json and initialises the
    module-level ``spclient``. Returns True on success, False when a
    credential is missing or the client could not be built.
    """
    data = datatools.get_data()
    keys = data["discord"]["keys"]
    # Both credentials are required; validate them with one loop instead of
    # two copy-pasted blocks.
    for key_name, description in (("spotify_client_id", "client id"),
                                  ("spotify_client_secret", "client secret")):
        if key_name not in keys:
            logger.warning("No API key found with name '{}'".format(key_name))
            logger.info("Please add your Spotify {} with name '{}' "
                        "in data.json to use Spotify features of the music module".format(
                            description, key_name))
            return False
    try:
        global spclient
        client_credentials_manager = SpotifyClientCredentials(
            keys["spotify_client_id"],
            keys["spotify_client_secret"])
        spclient = spotipy.Spotify(client_credentials_manager=client_credentials_manager)
        logger.debug("Spotify build successful")
        return True
    except Exception as e:
        # Best-effort: log the failure and signal it rather than crashing.
        logger.exception(e)
        return False
def appendBPoint(self, type=None, anchor=None, bcpIn=None, bcpOut=None, bPoint=None):
    """Append a bPoint to the contour.

    When *bPoint* is given, its attributes fill in any argument left as
    None. Missing control points default to (0, 0).
    """
    if bPoint is not None:
        # Copy only the fields the caller did not override explicitly.
        type = bPoint.type if type is None else type
        anchor = bPoint.anchor if anchor is None else anchor
        bcpIn = bPoint.bcpIn if bcpIn is None else bcpIn
        bcpOut = bPoint.bcpOut if bcpOut is None else bcpOut
    type = normalizers.normalizeBPointType(type)
    anchor = normalizers.normalizeCoordinateTuple(anchor)
    bcpIn = normalizers.normalizeCoordinateTuple((0, 0) if bcpIn is None else bcpIn)
    bcpOut = normalizers.normalizeCoordinateTuple((0, 0) if bcpOut is None else bcpOut)
    self._appendBPoint(type, anchor, bcpIn=bcpIn, bcpOut=bcpOut)
def halt(self):
    """Halt a sampling run executing in another thread.

    Sets status to 'halt'; if a sampling thread is currently active,
    blocks until its current iteration finishes.
    """
    self.status = 'halt'
    sampler = getattr(self, '_sampling_thread', None)
    # is_alive() replaces isAlive(), which was removed in Python 3.9
    # (is_alive has existed since Python 2.6, so this stays compatible).
    if sampler is not None and sampler.is_alive():
        print_('Waiting for current iteration to finish...')
        while sampler.is_alive():
            sleep(.1)
def bind(self, queue, exchange, routing_key='', nowait=True, arguments=None,
         ticket=None, cb=None):
    """Bind a queue to an exchange with an optional routing key.

    When *nowait* is effectively False (disallowed, or a callback *cb* is
    supplied), registers *cb* and a synchronous bind-ok handler.
    """
    # None sentinel instead of a mutable default argument ({}), which is
    # shared across calls.
    if arguments is None:
        arguments = {}
    nowait = nowait and self.allow_nowait() and not cb
    args = Writer()
    args.write_short(ticket or self.default_ticket).\
        write_shortstr(queue).\
        write_shortstr(exchange).\
        write_shortstr(routing_key).\
        write_bit(nowait).\
        write_table(arguments)
    self.send_frame(MethodFrame(self.channel_id, 50, 20, args))
    if not nowait:
        self._bind_cb.append(cb)
        self.channel.add_synchronous_cb(self._recv_bind_ok)
def calc_freefree_snu_ujy(ne, t, width, elongation, dist, ghz):
    """Calculate a flux density, in uJy, from pure free-free emission."""
    freq_hz = ghz * 1e9
    emissivity = calc_freefree_eta(ne, t, freq_hz)
    absorption = calc_freefree_kappa(ne, t, freq_hz)
    snu_cgs = calc_snu(emissivity, absorption, width, elongation, dist)
    # Convert from cgs flux density to microjansky.
    return snu_cgs * cgs.jypercgs * 1e6
def log(self, level, prefix = ''):
    """Write the contents of the Rule to the logging system."""
    emit = logging.log
    emit(level, "%sin interface: %s", prefix, self.in_interface)
    emit(level, "%sout interface: %s", prefix, self.out_interface)
    emit(level, "%ssource: %s", prefix, self.source)
    emit(level, "%sdestination: %s", prefix, self.destination)
    emit(level, "%smatches:", prefix)
    child_prefix = prefix + '  '
    for match in self.matches:
        match.log(level, child_prefix)
    if self.jump:
        emit(level, "%sjump:", prefix)
        self.jump.log(level, child_prefix)
def limit(self, r=5):
    """Limit selection to a range in the main dataframe."""
    try:
        truncated = self.df[:r]
    except Exception as e:
        self.err(e, "Can not limit data")
    else:
        self.df = truncated
def item_selection_changed(self):
    """Enable/disable the selection actions when the item selection changes."""
    has_selection = bool(self.selectedItems())
    self.expand_selection_action.setEnabled(has_selection)
    self.collapse_selection_action.setEnabled(has_selection)
def _make_repr_table_from_sframe(X):
    """Serializes an SFrame to a list of strings that, when printed,
    creates a well-formatted table.

    Columns are left-justified to the width of their widest stringified
    value (or the column name, whichever is longer).
    """
    assert isinstance(X, _SFrame)
    column_names = X.column_names()
    out_data = [[None] * len(column_names) for _ in range(X.num_rows())]
    column_sizes = [len(s) for s in column_names]
    for i, c in enumerate(column_names):
        for j, e in enumerate(X[c]):
            # Measure the stringified value: len(e) on the raw element
            # raises TypeError for non-sized types such as int/float.
            text = str(e)
            out_data[j][i] = text
            column_sizes[i] = max(column_sizes[i], len(text))
    header = [cn.ljust(k, ' ') for cn, k in zip(column_names, column_sizes)]
    separator = ["-" * k for k in column_sizes]
    body = [[e.ljust(k, ' ') for e, k in zip(row, column_sizes)]
            for row in out_data]
    return [' '.join(row) for row in [header, separator] + body]
def write(self, fptr):
    """Write a Data Reference box to file."""
    self._write_validate()
    box_start = fptr.tell()
    # Length placeholder (back-patched below) followed by the box type.
    fptr.write(struct.pack('>I4s', 0, b'dtbl'))
    # Number of data references.
    fptr.write(struct.pack('>H', len(self.DR)))
    for data_reference in self.DR:
        data_reference.write(fptr)
    box_end = fptr.tell()
    # Back-patch the true box length, then restore the file position.
    fptr.seek(box_start)
    fptr.write(struct.pack('>I', box_end - box_start))
    fptr.seek(box_end)
def safe_repr(self, obj):
    """Return repr(obj), or a placeholder string if repr itself raises."""
    try:
        return repr(obj)
    except Exception as exc:
        detail = (type(exc).__name__, exc)
        return '??? Broken repr (%s: %s)' % detail
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.