code stringlengths 51 2.34k | docstring stringlengths 11 171 |
|---|---|
async def enable_user(self, username):
    """Re-enable a previously disabled user via the UserManager facade."""
    user_facade = client.UserManagerFacade.from_connection(
        self.connection())
    entity = client.Entity(tag.user(username))
    return await user_facade.EnableUser([entity])
def c_struct(self):
    """Get the C struct declaration text for the module.

    Emits an anonymous struct instance when targeting windll, otherwise
    a typedef'd struct declaration using the module's struct names.
    """
    member = '\n'.join(self.c_member_funcs(True))
    if self.opts.windll:
        return 'struct {{\n{}{} }} {};\n'.format(
            self._c_dll_base(), member, self.name
        )
    return 'typedef\nstruct {2} {{\n{0}\n{1}}}\n{3};\n'.format(
        self._c_dll_base(), member, *self._c_struct_names()
    )
def _handle_factory_method(self, service_obj, method_name,
                           args=None, kwargs=None):
    """Call ``method_name`` on ``service_obj`` and return the created object.

    Scalar placeholders in ``args``/``kwargs`` are resolved before the call.
    """
    args = [] if args is None else args
    kwargs = {} if kwargs is None else kwargs
    _check_type('args', args, list)
    _check_type('kwargs', kwargs, dict)
    factory = getattr(service_obj, method_name)
    return factory(*self._replace_scalars_in_args(args),
                   **self._replace_scalars_in_kwargs(kwargs))
def _attachChild(self, child):
    """Attach a child database, returning the generated identifier for it."""
    self._childCounter += 1
    databaseName = 'child_db_%d' % (self._childCounter,)
    self._attachedChildren[databaseName] = child
    # NOTE(review): the sqlite path is interpolated into raw SQL; a quote
    # character in the path would break the statement -- confirm that the
    # dbdir paths here are always trusted/controlled.
    self.executeSQL("ATTACH DATABASE '%s' AS %s" % (
        child.dbdir.child('db.sqlite').path,
        databaseName,))
    return databaseName
def _to_datalibrary_safe(fname, gi, folder_name, sample_info, config):
    """Upload with retries to tolerate intermittent JSON/connection failures."""
    max_tries = 5
    failures = 0
    while True:
        try:
            _to_datalibrary(fname, gi, folder_name, sample_info, config)
            return
        except (simplejson.scanner.JSONDecodeError,
                bioblend.galaxy.client.ConnectionError) as err:
            failures += 1
            if failures > max_tries:
                raise
            print("Retrying upload, failed with:", str(err))
            time.sleep(5)
def add(parent, idx, value):
    """Add ``value`` to ``parent`` at key/index ``idx`` (JSON-patch add).

    Dicts refuse to overwrite an existing key; lists append when ``idx``
    is '' or '~', otherwise insert at the integer index.

    Raises:
        JSONPatchError: if the target key already exists or the parent is
            neither a dict nor a list.
    """
    if isinstance(parent, dict):
        if idx in parent:
            raise JSONPatchError("Item already exists")
        parent[idx] = value
    elif isinstance(parent, list):
        if idx == "" or idx == "~":
            parent.append(value)
        else:
            parent.insert(int(idx), value)
    else:
        # Bug fix: was JSONPathError (an undefined name, so this branch
        # raised NameError); use the same exception type as above.
        raise JSONPatchError("Invalid path for operation")
def infra_nodes(info, meta, max_pod_cluster, label, key):
    """Create the response for all infra node types.

    Returns None when no rows of ``info`` match the machine ids listed
    under ``label`` in ``meta``.
    """
    nodes = meta.get(label, []) or []
    infos = info[info["machine_id"].isin(nodes)]
    if infos.empty:
        return
    return make_response(key, max_pod_cluster=max_pod_cluster, infos=infos,
                         GREEN=Fore.GREEN, RED=Fore.RED, YELLOW=Fore.YELLOW, NC=Style.RESET_ALL)
def handle_session_cookie(self):
    """Echo back the JSESSIONID cookie, or set a dummy one, when enabled."""
    if not self.server.settings['jsessionid']:
        return
    cookie = self.cookies.get('JSESSIONID')
    value = cookie.value if cookie else 'dummy'
    self.set_cookie('JSESSIONID', value)
def main():
    """Entry point for stand-alone execution."""
    conf.init(), db.init(conf.DbPath)
    inqueue = LineQueue(sys.stdin).queue
    # Minimal duck-typed queue that echoes each item back to stdout.
    outqueue = type("", (), {"put": lambda self, x: print("\r%s" % x, end=" ")})()
    if "--quiet" in sys.argv: outqueue = None
    if conf.MouseEnabled: inqueue.put("mouse_start")
    if conf.KeyboardEnabled: inqueue.put("keyboard_start")
    start(inqueue, outqueue)
def getcloud(site, feed_id=None):
    """Return the tag cloud for a site or one of the site's subscribers.

    Results are cached per site id; a specific subscriber's cloud is
    returned when ``feed_id`` is given ([] when that feed is unknown).
    """
    cloudict = fjcache.cache_get(site.id, 'tagclouds')
    if not cloudict:
        cloudict = cloudata(site)
        # Bug fix: the cache was written keyed by the site object while the
        # read above uses site.id, so the cache could never hit.
        fjcache.cache_set(site.id, 'tagclouds', cloudict)
    if feed_id:
        feed_id = int(feed_id)
        if feed_id in cloudict:
            return cloudict[feed_id]
        return []
    return cloudict[0]
def _dispatch_change_event(self, object, trait_name, old, new, handler):
    """Prepare and dispatch a trait change event to a listener.

    Pre/post tracer hooks are invoked around the dispatch when installed.
    SystemNotReady is silently swallowed; any other exception is reported
    through tnotifier.handle_exception after the post-tracer runs.
    """
    args = self.argument_transform(object, trait_name, old, new)
    if tnotifier._pre_change_event_tracer is not None:
        tnotifier._pre_change_event_tracer(object, trait_name, old, new, handler)
    from automate.common import SystemNotReady
    try:
        self.dispatch(handler, *args)
    except SystemNotReady:
        pass
    except Exception as e:
        if tnotifier._post_change_event_tracer is not None:
            tnotifier._post_change_event_tracer(object, trait_name, old, new,
                                                handler, exception=e)
        tnotifier.handle_exception(object, trait_name, old, new)
    else:
        if tnotifier._post_change_event_tracer is not None:
            tnotifier._post_change_event_tracer(object, trait_name, old, new,
                                                handler, exception=None)
def _compute_mean(self, C, g, mag, hypo_depth, dists, imt):
    """Compute mean according to equation on Table 2, page 2275."""
    # Magnitude-dependent near-source saturation term.
    delta = 0.00750 * 10 ** (0.507 * mag)
    # Hypocentral distance for small events, rupture distance otherwise.
    if mag < 6.5:
        R = np.sqrt(dists.rhypo ** 2 + delta ** 2)
    else:
        R = np.sqrt(dists.rrup ** 2 + delta ** 2)
    mean = (
        C['c1'] + C['c2'] * mag +
        C['c3'] * R -
        C['c4'] * np.log10(R) +
        C['c5'] * hypo_depth
    )
    # Convert log10 to natural log; non-PGV values also from cm/s^2 to g.
    if imt == PGV():
        mean = np.log(10 ** mean)
    else:
        mean = np.log((10 ** mean) * 1e-2 / g)
    return mean
def _margdist_loglr(self, mf_snr, opt_snr):
    """Return the log likelihood ratio marginalized over distance."""
    mf_snr_marg = mf_snr/self._dist_array
    opt_snr_marg = opt_snr/self._dist_array**2
    # Weighted logsumexp over the distance grid; weights = prior * spacing.
    return special.logsumexp(mf_snr_marg - 0.5*opt_snr_marg,
                             b=self._deltad*self.dist_prior)
def write_type_dumps(self, operations, preserve_order, output_dir):
    """Split SQL operations by type and dump each group to its own file.

    Unless ``preserve_order`` is set, each group is sorted by object name.
    Empty groups produce no file.
    """
    by_type = {SqlType.INDEX: [], SqlType.FUNCTION: [], SqlType.TRIGGER: []}
    for operation in operations:
        by_type[operation.sql_type].append(operation)
    if not preserve_order:
        for obj_type, ops in by_type.items():
            by_type[obj_type] = sorted(ops, key=lambda o: o.obj_name)
    if by_type[SqlType.INDEX]:
        self.write_dump('indexes', by_type[SqlType.INDEX], output_dir)
    if by_type[SqlType.FUNCTION]:
        self.write_dump('functions', by_type[SqlType.FUNCTION], output_dir)
    if by_type[SqlType.TRIGGER]:
        self.write_dump('triggers', by_type[SqlType.TRIGGER], output_dir)
async def join(self, ctx, *, channel: discord.VoiceChannel):
    """Join a voice channel, moving there if already connected elsewhere."""
    if ctx.voice_client is not None:
        return await ctx.voice_client.move_to(channel)
    await channel.connect()
def update_selection_self_prior_condition(self, state_row_iter, sm_selected_model_set, selected_model_list):
    """Sync one model between the tree view and the state machine selection.

    The model is removed from the state machine selection set when it is
    no longer selected in the tree view, and added when newly selected.
    """
    selected_path = self.tree_store.get_path(state_row_iter)
    tree_model_row = self.tree_store[selected_path]
    model = tree_model_row[self.MODEL_STORAGE_ID]
    if model in sm_selected_model_set and model not in selected_model_list:
        sm_selected_model_set.remove(model)
    elif model not in sm_selected_model_set and model in selected_model_list:
        sm_selected_model_set.add(model)
def to_flat(coord):
    """Convert a Minigo (row, col) coordinate to a flattened board index.

    None (a pass move) maps to the index one past the board: go.N * go.N.
    """
    if coord is None:
        return go.N * go.N
    row, col = coord
    return go.N * row + col
def __get_img(self):
    """Return a PIL image object corresponding to the page."""
    with self.fs.open(self.__img_path, 'rb') as fd:
        img = PIL.Image.open(fd)
        # load() reads all pixel data now, before the file handle closes.
        img.load()
        return img
def _filter_seqs(fn):
    """Convert sequence names in a FASTA/FASTQ file to unique ids.

    Writes sequences shorter than 26 bases (whose counts are 0 or >1) to
    ``<fn>_unique.fa`` and returns that path; skips all work when the
    output file already exists.
    """
    out_file = op.splitext(fn)[0] + "_unique.fa"
    idx = 0
    if not file_exists(out_file):
        with open(out_file, 'w') as out_handle:
            with open(fn) as in_handle:
                for line in in_handle:
                    if line.startswith("@") or line.startswith(">"):
                        fixed_name = _make_unique(line.strip(), idx)
                        # Bug fix: file objects have no .next() method on
                        # Python 3; use the next() builtin instead.
                        seq = next(in_handle).strip()
                        counts = _get_freq(fixed_name)
                        if len(seq) < 26 and (counts > 1 or counts == 0):
                            idx += 1
                            print(fixed_name, file=out_handle, end="\n")
                            print(seq, file=out_handle, end="\n")
                        if line.startswith("@"):
                            # Skip the FASTQ '+' separator and quality lines.
                            next(in_handle)
                            next(in_handle)
    return out_file
def getThirdPartyLibCmakeFlags(self, libs):
    """Retrieve CMake flags for building against Unreal-bundled third-party libs.

    Leading '--multiline' and '--nodefaults' entries in ``libs`` act as
    option flags and are stripped before resolving library names.
    """
    fmt = PrintingFormat.singleLine()
    if libs[0] == '--multiline':
        fmt = PrintingFormat.multiLine()
        libs = libs[1:]
    platformDefaults = True
    if libs[0] == '--nodefaults':
        platformDefaults = False
        libs = libs[1:]
    details = self.getThirdpartyLibs(libs, includePlatformDefaults=platformDefaults)
    CMakeCustomFlags.processLibraryDetails(details)
    return details.getCMakeFlags(self.getEngineRoot(), fmt)
def course(self):
    """Return the course (root ancestor) this node belongs to."""
    node = self.parent
    while node.parent:
        node = node.parent
    return node
def getRecommendedRenderTargetSize(self):
    """Return (width, height) suggested for the distortion render target."""
    fn = self.function_table.getRecommendedRenderTargetSize
    # Output parameters are filled in by the native call.
    pnWidth = c_uint32()
    pnHeight = c_uint32()
    fn(byref(pnWidth), byref(pnHeight))
    return pnWidth.value, pnHeight.value
def resolve_params(self, text):
    """Parse 'key: value' lines of ``text`` into a MultiValueDict."""
    params = MultiValueDict()
    for raw_line in text.split('\n'):
        key, sep, value = raw_line.partition(':')
        if sep:
            params.appendlist(key.strip(), value.strip())
    return params
def remove(self, filename):
    """Remove a file from the device; True when the shell produced no output."""
    return not self.shell('rm', filename)
def match(self, p_todo):
    """Return True when every child of the todo item is completed."""
    children = self.todolist.children(p_todo)
    return all(child.is_completed() for child in children)
async def update_version(self):
    """Retrieve firmware version and protocol version from the API.

    Raises:
        PyVLXException: if either version request does not succeed.
    """
    get_version = GetVersion(pyvlx=self)
    await get_version.do_api_call()
    if not get_version.success:
        raise PyVLXException("Unable to retrieve version")
    self.version = get_version.version
    get_protocol_version = GetProtocolVersion(pyvlx=self)
    await get_protocol_version.do_api_call()
    if not get_protocol_version.success:
        raise PyVLXException("Unable to retrieve protocol version")
    self.protocol_version = get_protocol_version.version
    PYVLXLOG.warning(
        "Connected to: %s, protocol version: %s",
        self.version, self.protocol_version)
def _initFilesystemInfo(self):
    """Initialize the mapping of filesystem mount points to device names."""
    self._mapFSpathDev = {}
    fsinfo = FilesystemInfo()
    for fs in fsinfo.getFSlist():
        devpath = fsinfo.getFSdev(fs)
        dev = self._getUniqueDev(devpath)
        # Filesystems without a resolvable unique device are skipped.
        if dev is not None:
            self._mapFSpathDev[fs] = dev
def update(self, **kwargs):
    """Update ``params`` values from keyword args, looked up via alias."""
    for key in self.prior_params:
        try:
            self.params[key] = kwargs[self.alias[key]]
        except KeyError:
            # Parameter (or its alias) was not supplied; leave it alone.
            continue
def _get_repo_metadata(self):
    """Return cached SPM repo metadata, refreshing stale repos first."""
    cache = salt.cache.Cache(self.opts, self.opts['spm_cache_dir'])
    metadata = {}
    def _read_metadata(repo, repo_info):
        # A missing timestamp means this repo has never been cached.
        if cache.updated('.', repo) is None:
            log.warning('Updating repo metadata')
            self._download_repo_metadata({})
        metadata[repo] = {
            'info': repo_info,
            'packages': cache.fetch('.', repo),
        }
    self._traverse_repos(_read_metadata)
    return metadata
def from_json_format(conf):
    """Convert octal-string mode fields of a parsed JSON dict in place."""
    for mode_key in ('fmode', 'dmode'):
        if mode_key in conf:
            conf[mode_key] = int(conf[mode_key], 8)
def toggle_input(self):
    """Enable or disable the radio button based on the selected input item."""
    current_index = self.input.currentIndex()
    # Only the item carrying the enabler as its user data keeps the button on.
    if self.input.itemData(current_index, Qt.UserRole) != (
            self.radio_button_enabler):
        self.disable_radio_button()
    else:
        self.enable_radio_button()
def fire_exception(exc, opts, job=None, node='minion'):
    """Fire a raw exception across the Salt event bus on the '_salt_error' tag."""
    # NOTE(review): job is normalized but not used below -- presumably kept
    # for interface compatibility; confirm before removing.
    if job is None:
        job = {}
    event = salt.utils.event.SaltEvent(node, opts=opts, listen=False)
    event.fire_event(pack_exception(exc), '_salt_error')
def ecdh(self, identity, pubkey):
    """Derive a shared secret via ECDH on the device, after user confirmation."""
    log.info('please confirm GPG decryption on %s for "%s"...',
             self.device, identity.to_string())
    with self.device:
        return self.device.ecdh(pubkey=pubkey, identity=identity)
def html(self, text=TEXT):
    """Generate the HTML representation of the report pieces.

    Markdown strings render with table support; Element pieces render
    themselves. Pieces are joined by blank lines.
    """
    self.logger.debug("Generating the HTML report{}..."
                      .format(["", " (text only)"][text]))
    html = []
    for piece in self._pieces:
        if isinstance(piece, string_types):
            html.append(markdown2.markdown(piece, extras=["tables"]))
        elif isinstance(piece, Element):
            html.append(piece.html())
    return "\n\n".join(html)
def _all(self, *args, **kwargs):
    """Return the full system summary (software, system, services, config, payload)."""
    data = dict()
    data['software'] = self._software(**kwargs)
    data['system'] = self._system(**kwargs)
    data['services'] = self._services(**kwargs)
    try:
        data['configuration'] = self._configuration(**kwargs)
    except InspectorQueryException as ex:
        # Configuration is best-effort: record N/A and log the failure.
        data['configuration'] = 'N/A'
        log.error(ex)
    data['payload'] = self._payload(**kwargs) or 'N/A'
    return data
async def connect(self):
    """Create a TURN allocation and return the relayed address.

    On a 401 response the allocate request is retried once with the
    nonce/realm supplied by the server and a freshly derived integrity key.
    """
    request = stun.Message(message_method=stun.Method.ALLOCATE,
                           message_class=stun.Class.REQUEST)
    request.attributes['LIFETIME'] = self.lifetime
    request.attributes['REQUESTED-TRANSPORT'] = UDP_TRANSPORT
    try:
        response, _ = await self.request(request)
    except exceptions.TransactionFailed as e:
        response = e.response
        if response.attributes['ERROR-CODE'][0] == 401:
            self.nonce = response.attributes['NONCE']
            self.realm = response.attributes['REALM']
            self.integrity_key = make_integrity_key(self.username, self.realm, self.password)
            request.transaction_id = random_transaction_id()
            response, _ = await self.request(request)
    self.relayed_address = response.attributes['XOR-RELAYED-ADDRESS']
    logger.info('TURN allocation created %s', self.relayed_address)
    # Keep the allocation alive with a background refresh task.
    self.refresh_handle = asyncio.ensure_future(self.refresh())
    return self.relayed_address
def add_cssfile(self, src: str) -> None:
    """Add a stylesheet <link> for ``src`` to this document's head."""
    self.head.appendChild(Link(rel='stylesheet', href=src))
def sar(computation: BaseComputation) -> None:
    """Arithmetic (sign-extending) bitwise right shift."""
    shift_length, value = computation.stack_pop(num_items=2, type_hint=constants.UINT256)
    value = unsigned_to_signed(value)
    # Shifts of 256+ bits collapse to all-zeros or all-ones by sign.
    if shift_length >= 256:
        result = 0 if value >= 0 else constants.UINT_255_NEGATIVE_ONE
    else:
        result = (value >> shift_length) & constants.UINT_256_MAX
    computation.stack_push(result)
def measure_string(self, text, fontname, fontsize, encoding=0):
    """Measure the rendered length of a string for a Base14 font."""
    return _fitz.Tools_measure_string(self, text, fontname, fontsize, encoding)
def ignore_failed_logs_action(self, request, queryset):
    """Admin action: set FAILED trigger logs in the queryset to IGNORED."""
    count = _ignore_failed_logs(queryset)
    self.message_user(
        request,
        _('{count} failed trigger logs marked as ignored.').format(count=count),
    )
def require_admin(func):
    """Decorator requiring a logged-in admin user; otherwise responds 403."""
    @wraps(func)
    @require_login
    def decorated(*args, **kwargs):
        user = current_user()
        if user and user.is_admin:
            return func(*args, **kwargs)
        else:
            return Response(
                'Forbidden', 403
            )
    return decorated
def use_plenary_sequence_rule_enabler_rule_view(self):
    """Pass through to each provider session's plenary view setter, if present."""
    self._object_views['sequence_rule_enabler_rule'] = PLENARY
    for session in self._get_provider_sessions():
        try:
            session.use_plenary_sequence_rule_enabler_rule_view()
        except AttributeError:
            # Sessions without this view simply keep their current one.
            pass
def register_pkg(name, formula_def, conn=None):
    """Register a package in the package database.

    Opens (and later closes) a connection only when one is not supplied
    by the caller.
    """
    close = False
    if conn is None:
        close = True
        conn = init()
    conn.execute('INSERT INTO packages VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', (
        name,
        formula_def['version'],
        formula_def['release'],
        datetime.datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT'),
        formula_def.get('os', None),
        formula_def.get('os_family', None),
        formula_def.get('dependencies', None),
        formula_def.get('os_dependencies', None),
        formula_def.get('os_family_dependencies', None),
        formula_def['summary'],
        formula_def['description'],
    ))
    if close:
        conn.close()
def remove_files():
    """Remove local track files absent from the module-level fileToKeep set."""
    logger.info("Removing local track files that were not downloaded...")
    files = [f for f in os.listdir('.') if os.path.isfile(f)]
    for f in files:
        if f not in fileToKeep:
            os.remove(f)
def finish_experiment(self, session, exp_id):
    """Remove a completed experiment id from the session and return the rest."""
    self.logger.debug('Finishing %s' % exp_id)
    remaining = [eid for eid in session.get('experiments', []) if eid != exp_id]
    session['experiments'] = remaining
    return remaining
def load_json_network(json_dict):
    """Load a pyphi network and its state from a parsed JSON dictionary."""
    network = pyphi.Network.from_json(json_dict['network'])
    state = json_dict['state']
    return (network, state)
def addList(self, source_id, dir_path, is_recurcive=False, timestamp_reception=None, training_metadata=None):
    """Add every profile file found in a directory.

    Returns a dict with 'success' (path -> response) and 'fail'
    (path -> exception) for the individual uploads.

    Raises:
        ValueError: if ``dir_path`` is not a directory.
    """
    # Bug fix: the mutable default ([]) was shared across calls; use None
    # as the sentinel and build a fresh list per call instead.
    if training_metadata is None:
        training_metadata = []
    if not path.isdir(dir_path):
        raise ValueError(dir_path + ' is not a directory')
    files_to_send = _get_files_from_dir(dir_path, is_recurcive)
    succeed_upload = {}
    failed_upload = {}
    for file_path in files_to_send:
        try:
            resp = self.add(source_id=source_id,
                            file_path=file_path, profile_reference="",
                            timestamp_reception=timestamp_reception, training_metadata=training_metadata)
            if resp['code'] != 200 and resp['code'] != 201:
                failed_upload[file_path] = ValueError('Invalid response: ' + str(resp))
            else:
                succeed_upload[file_path] = resp
        except BaseException as e:
            # Record per-file failures; the remaining files still upload.
            failed_upload[file_path] = e
    result = {
        'success': succeed_upload,
        'fail': failed_upload
    }
    return result
def remote():
    """Update package info from PyPI for all non-editable packages.

    Returns a dict mapping diff status -> list of refreshed PackageVersions,
    plus a 'refreshed_at' timestamp.
    """
    logger.info("Fetching latest data from PyPI.")
    results = defaultdict(list)
    packages = PackageVersion.objects.exclude(is_editable=True)
    for pv in packages:
        pv.update_from_pypi()
        results[pv.diff_status].append(pv)
        logger.debug("Updated package from PyPI: %r", pv)
    results['refreshed_at'] = tz_now()
    return results
def named_entity_texts(self):
    """Return the texts of named entities, tagging the document first if needed."""
    if not self.is_tagged(NAMED_ENTITIES):
        self.tag_named_entities()
    return self.texts(NAMED_ENTITIES)
def removeHandler(self, event_name):
    """Clear the handler registered for ``event_name``.

    Raises:
        ValueError: if the event name is unknown.
    """
    if event_name in self.handlers:
        self.handlers[event_name] = None
    else:
        raise ValueError('{} is not a valid event'.format(event_name))
def _find_frame(stack, start=0):
    """Find the first frame on the stack that is not an __init__ call."""
    frame = inspect.getframeinfo(stack[start][0])
    # Recurse past constructor frames to reach the real caller.
    if frame.function == '__init__':
        return _find_frame(stack, start + 1)
    return frame
def _clearContents(self):
    """Clear the inspector widgets when no valid data is available."""
    logger.debug("Clearing inspector contents")
    self.titleLabel.setText('')
    self.imageItem.clear()
    self.imagePlotItem.setLabel('left', '')
    self.imagePlotItem.setLabel('bottom', '')
    # Reset histogram range/levels to a default window.
    self.histLutItem.setHistogramRange(0, 100)
    self.histLutItem.setLevels(0, 100)
    self.crossPlotRow, self.crossPlotCol = None, None
    self.probeLabel.setText('')
    self.crossLineHorizontal.setVisible(False)
    self.crossLineVertical.setVisible(False)
    self.crossLineHorShadow.setVisible(False)
    self.crossLineVerShadow.setVisible(False)
    self.horCrossPlotItem.clear()
    self.verCrossPlotItem.clear()
def md5hash(self):
    """Return the base64-encoded MD5 digest of the file content."""
    digest = hashlib.md5(self.content).digest()
    return b64_string(digest)
def gen_input_add(sig_dic):
    """Generate the HTML 'add' input control for a signature definition.

    File-download tags use the dedicated download template; everything
    else uses the generic input template. The substitutions are identical.
    """
    tpl_key = ('input_add_download' if sig_dic['en'] == 'tag_file_download'
               else 'input_add')
    return HTML_TPL_DICT[tpl_key].format(
        sig_en=sig_dic['en'],
        sig_zh=sig_dic['zh'],
        sig_dic=sig_dic['dic'][1],
        sig_type=sig_dic['type']
    )
async def get_trans_flags(self) -> 'Flags':
    """Give middlewares a chance to compute the translation flags.

    The default implementation returns no flags; the 'make_trans_flags'
    middleware hook may override it.
    """
    from bernard.middleware import MiddlewareManager
    async def make_flags(request: Request) -> 'Flags':
        return {}
    mf = MiddlewareManager.instance().get('make_trans_flags', make_flags)
    return await mf(self)
def args(self):
    """Parsed command-line arguments, computed lazily and cached."""
    if self._args is None:
        self._args = self._build_parser().parse_args()
    return self._args
def hourly_dew_point(self):
    """A data collection of hourly dew point temperatures (C) over the day."""
    dpt_data = self._humidity_condition.hourly_dew_point_values(
        self._dry_bulb_condition)
    return self._get_daily_data_collections(
        temperature.DewPointTemperature(), 'C', dpt_data)
def run_object_query(client, base_object_query, start_record, limit_to,
                     verbose=False):
    """Execute one page of an object query; small wrapper to enable retries.

    When verbose, prints the requested window and the wall-clock span of
    the call.
    """
    if verbose:
        print("[start: %d limit: %d]" % (start_record, limit_to))
    start = datetime.datetime.now()
    result = client.execute_object_query(
        object_query=base_object_query,
        start_record=start_record,
        limit_to=limit_to)
    end = datetime.datetime.now()
    if verbose:
        print("[%s - %s]" % (start, end))
    return result
def hg_hook(ui, repo, node=None, **kwargs):
    """Run pylama on the files touched by a mercurial commit.

    Collects existing files changed in revisions from ``node`` to tip and
    feeds them to process_paths.
    """
    seen = set()
    paths = []
    if len(repo):
        # NOTE(review): relies on repo[node] coercing to an int revision
        # inside range(); confirm against the targeted Mercurial API.
        for rev in range(repo[node], len(repo)):
            for file_ in repo[rev].files():
                file_ = op.join(repo.root, file_)
                # Skip duplicates and files deleted by the commit.
                if file_ in seen or not op.exists(file_):
                    continue
                seen.add(file_)
                paths.append(file_)
    options = parse_options()
    setup_logger(options)
    if paths:
        process_paths(options, candidates=paths)
def lint_stylesheets(context: Context):
    """Run sass-lint over the app's SCSS sources for code and style errors."""
    args = [
        '--config', os.path.join(context.app.common_templates_path, 'mtp_common', 'build_tasks', 'sass-lint.yml'),
        '--format', 'stylish',
        '--syntax', 'scss',
    ]
    if context.verbosity > 1:
        args.append('--verbose')
    args.append(os.path.join(context.app.scss_source_path, '**', '*.scss'))
    return context.node_tool('sass-lint', *args)
def list(self, list, prefix=None):
    """Process a pyth list into the target, one '- ' bullet per entry.

    Note: the second parameter shadows the builtin ``list`` and the
    incoming ``prefix`` argument is overwritten before first use; both
    are kept as-is for interface compatibility.
    """
    self.indent += 1
    for (i, entry) in enumerate(list.content):
        for (j, paragraph) in enumerate(entry.content):
            # Only the first paragraph of an entry gets the bullet marker.
            prefix = "- " if j == 0 else "  "
            handler = self.paragraphDispatch[paragraph.__class__]
            handler(paragraph, prefix)
        self.target.write("\n")
    self.indent -= 1
async def send(self, hittype, *args, **data):
    """Transmit a hit to Google Analytics using the measurement protocol.

    Raises:
        KeyError: for unsupported hit types.
    """
    if hittype not in self.valid_hittypes:
        raise KeyError('Unsupported Universal Analytics Hit Type: {0}'.format(repr(hittype)))
    self.set_timestamp(data)
    self.consume_options(data, hittype, args)
    # Positional dict arguments merge into the payload data.
    for item in args:
        if isinstance(item, dict):
            for key, val in self.payload(item):
                data[key] = val
    # Session defaults fill any keys the caller did not set.
    for k, v in self.params.items():
        if k not in data:
            data[k] = v
    data = dict(self.payload(data))
    if self.hash_client_id:
        data['cid'] = generate_uuid(data['cid'])
    await self.http.send(data)
def merge_results(self, other_processor):
    """Merge the statements and JSON bookkeeping of another processor into this one.

    Raises:
        ValueError: when ``other_processor`` is not an instance of this class.
    """
    if not isinstance(other_processor, self.__class__):
        raise ValueError("Can only extend with another %s instance."
                         % self.__class__.__name__)
    self.statements.extend(other_processor.statements)
    if other_processor.statements_sample is not None:
        if self.statements_sample is None:
            self.statements_sample = other_processor.statements_sample
        else:
            self.statements_sample.extend(other_processor.statements_sample)
    # Double-underscore attribute access relies on name mangling, so this
    # only works because both objects share the same class.
    self._merge_json(other_processor.__statement_jsons,
                     other_processor.__evidence_counts)
    return
def parse(rmk: str) -> RemarksData:
    """Extract decimal temperature and dewpoint values from a remarks string."""
    rmkdata = {}
    for item in rmk.split(' '):
        # 'T' groups of 4 or 8 digits encode temperature (and dewpoint).
        if len(item) in [5, 9] and item[0] == 'T' and item[1:].isdigit():
            rmkdata['temperature_decimal'] = core.make_number(_tdec(item[1:5], None))
            rmkdata['dewpoint_decimal'] = core.make_number(_tdec(item[5:], None))
    return RemarksData(**rmkdata)
def markers_to_events(self, keep_name=False):
    """Copy all dataset markers into an annotation event type.

    When ``keep_name`` is False the user is prompted for a new event-type
    name; otherwise markers keep their own names.
    """
    markers = self.parent.info.markers
    if markers is None:
        self.parent.statusBar.showMessage('No markers in dataset.')
        return
    if not keep_name:
        name, ok = self.new_eventtype()
        if not ok:
            # User cancelled the name dialog.
            return
    else:
        name = None
    self.annot.add_events(markers, name=name, chan='')
    if keep_name:
        self.display_eventtype()
        n_eventtype = self.idx_eventtype.count()
        self.idx_eventtype.setCurrentIndex(n_eventtype - 1)
    self.update_annotations()
def seek(self, pos):
    """Seek the underlying file to ``pos`` and reset all tokenizer state."""
    if self.debug:
        logging.debug('seek: %r' % pos)
    self.fp.seek(pos)
    # Reset buffer bookkeeping.
    self.bufpos, self.buf, self.charpos = pos, b'', 0
    # Reset token state and restart at the main parse state.
    self._curtoken, self._curtokenpos, self._tokens = b'', 0, []
    self._parse1 = self._parse_main
def search(search_text, config=None):
    """Search stored host entries for ``search_text`` and print the results."""
    storm_ = get_storm_instance(config)
    try:
        results = storm_.search_host(search_text)
        if results:
            message = 'Listing results for {0}:\n'.format(search_text)
            message += "".join(results)
            print(message)
        else:
            print('no results found.')
    except Exception as error:
        print(get_formatted_message(str(error), 'error'), file=sys.stderr)
        sys.exit(1)
def _is_valid_string(self, inpt, metadata):
    """Check whether ``inpt`` is a string satisfying the metadata constraints."""
    if not is_string(inpt):
        return False
    if metadata.get_minimum_string_length() and len(inpt) < metadata.get_minimum_string_length():
        return False
    if metadata.get_maximum_string_length() and len(inpt) > metadata.get_maximum_string_length():
        return False
    # Valid unless a string set is defined and the input is not in it.
    return not metadata.get_string_set() or inpt in metadata.get_string_set()
def load_configs(self, conf_file):
    """Read a section-less config file, placing everything under [global]."""
    with open(conf_file) as stream:
        # Prepend a synthetic section header so configparser accepts it.
        self._config.read_file(itertools.chain(("[global]",), stream))
    return self._config['global']
def default(request):
    """Return the websocket context variables (URI and heartbeat) for templates."""
    protocol = 'wss://' if request.is_secure() else 'ws://'
    heartbeat = settings.WS4REDIS_HEARTBEAT
    heartbeat_msg = '"{0}"'.format(heartbeat) if heartbeat else 'null'
    return {
        'WEBSOCKET_URI': protocol + request.get_host() + settings.WEBSOCKET_URL,
        'WS4REDIS_HEARTBEAT': mark_safe(heartbeat_msg),
    }
def com_google_fonts_check_font_copyright(ttFont):
    """Check that name-table copyright notices match the canonical pattern.

    Yields a PASS or FAIL result per copyright entry, plus a summary PASS
    when nothing failed.
    """
    import re
    from fontbakery.utils import get_name_entry_strings
    failed = False
    for string in get_name_entry_strings(ttFont, NameID.COPYRIGHT_NOTICE):
        does_match = re.search(r'Copyright [0-9]{4} The .* Project Authors \([^\@]*\)',
                               string)
        if does_match:
            yield PASS, ("Name Table entry: Copyright field '{}'"
                         " matches canonical pattern.").format(string)
        else:
            failed = True
            yield FAIL, ("Name Table entry: Copyright notices should match"
                         " a pattern similar to:"
                         " 'Copyright 2017 The Familyname"
                         " Project Authors (git url)'\n"
                         "But instead we have got:"
                         " '{}'").format(string)
    if not failed:
        yield PASS, "Name table copyright entries are good"
def LogInit():
    """Configure the client logging subsystem.

    Replaces any pre-logging memory handlers with the real handlers and
    replays the buffered records through the configured logger.
    """
    logging.debug("Initializing client logging subsystem.")
    logger = logging.getLogger()
    memory_handlers = [
        m for m in logger.handlers
        if m.__class__.__name__ == "PreLoggingMemoryHandler"
    ]
    logger.handlers = list(GetLogHandlers())
    SetLogLevels()
    # Flush records captured before logging was configured.
    for handler in memory_handlers:
        for record in handler.buffer:
            logger.handle(record)
def _valid_folder(self, base, name):
    """Return whether a folder may be descended into during a search."""
    valid = True
    fullpath = os.path.join(base, name)
    # Excluded when recursion is off, or the folder-exclude check rejects it.
    if (
        not self.recursive or
        (
            self.folder_exclude_check is not None and
            not self.compare_directory(fullpath[self._base_len:] if self.dir_pathname else name)
        )
    ):
        valid = False
    if valid and (not self.show_hidden and util.is_hidden(fullpath)):
        valid = False
    # Give the validation hook the final say for folders passing the checks.
    return self.on_validate_directory(base, name) if valid else valid
def clear_all(self):
    """Clear every pending injection, including the per-user config files."""
    self.injections.clear_all()
    for config_file in CONFIG_FILES:
        self.injections.clear(os.path.join("~", config_file))
def _initFeedFuncs(self):
    """Register the built-in Cortex feed functions."""
    self.setFeedFunc('syn.nodes', self._addSynNodes)
    self.setFeedFunc('syn.splice', self._addSynSplice)
    self.setFeedFunc('syn.ingest', self._addSynIngest)
def ai(x, context=None):
    """Return the Airy function of x, evaluated in the given (or current) context."""
    return _apply_function_in_current_context(
        BigFloat,
        mpfr.mpfr_ai,
        (BigFloat._implicit_convert(x),),
        context,
    )
def retrieve(self, request, project, pk=None):
    """Return the job log with the given ID as a serialized response."""
    log = JobLog.objects.get(id=pk)
    return Response(self._log_as_dict(log))
def validate(self, ymldata=None, messages=None):
    """Validate the Telemetry Dictionary definitions.

    Content validation only runs when schema validation produced no
    messages; returns True when every performed check passed.
    """
    schema_val = self.schema_val(messages)
    # Bug fix: content_val was referenced unconditionally below but only
    # assigned when messages was empty, raising NameError otherwise.
    # Default to True so only checks actually performed affect the result.
    content_val = True
    if len(messages) == 0:
        content_val = self.content_val(ymldata, messages)
    return schema_val and content_val
def enum(cls, options, values):
    """Create an ArgumentType factory for choosing one of a set of known values.

    ``options`` is a sequence of (name, real_value) pairs; the returned
    factory builds instances that decode an argument index back into its
    real value.
    """
    _, real = zip(*options)
    def factory(i, name):
        return cls(i, name, (len(real),), lambda a: real[a[0]], values)
    return factory
def handle_translocation_illegal(self, line: str, position: int, tokens: ParseResults) -> None:
    """Raise a MalformedTranslocationWarning for a malformed translocation line."""
    raise MalformedTranslocationWarning(self.get_line_number(), line, position, tokens)
def from_string(source, args=None):
    """Render a template string, using Jinja when available.

    Raises:
        RuntimeError: if args are supplied but Jinja is not installed.
    """
    if _has_jinja:
        logger.info('Precompiling model with arguments: {}'.format(args))
        return _jenv.from_string(source).render(args or {})
    if args:
        raise RuntimeError(_except_text)
    return source
def parse_metadata(self):
    """Parse the INDEX_JSON file and group its models by UniProt accession."""
    with open(self.metadata_index_json) as handle:
        entries = json.load(handle)['index']
    grouped = {}
    for entry in entries:
        grouped.setdefault(entry['uniprot_ac'], []).append(entry)
    self.all_models = grouped
def rule_expand(component, text):
    """Expand one completion-rule component into its candidate strings."""
    global rline_mpstate
    # '<a|b|c>' alternation syntax expands to its options.
    if component[0] == '<' and component[-1] == '>':
        return component[1:-1].split('|')
    # Registered completion functions are called with the current text.
    funcs = rline_mpstate.completion_functions
    if component in funcs:
        return funcs[component](text)
    return [component]
def translate(self, instr):
    """Return the SMT representation of a REIL instruction.

    Re-raises any translator failure after logging it with a traceback.
    """
    try:
        translator = self._instr_translators[instr.mnemonic]
        return translator(*instr.operands)
    except Exception:
        logger.error("Failed to translate instruction: %s", instr, exc_info=True)
        raise
def raise_for_response(self, responses):
    """Build and raise a PostmarkerException from a list of API responses.

    A single response raises with its message alone; multiple responses
    are joined into one bracketed list.
    """
    messages = [self.client.format_exception_message(r) for r in responses]
    if len(messages) == 1:
        raise PostmarkerException(messages[0])
    raise PostmarkerException("[%s]" % ", ".join(messages))
def run(self, tasks=None, timeout=None):
    """Block and run the event loop until all tasks are completed.

    Delegates to wait_all_tasks_done when the loop is already running.
    """
    timeout = self._timeout if timeout is None else timeout
    if self.async_running or self.loop.is_running():
        return self.wait_all_tasks_done(timeout)
    tasks = tasks or self.todo_tasks
    # Bug fix: asyncio.gather() dropped its 'loop' keyword in Python 3.10;
    # the task futures already carry their loop, so it was redundant.
    return self.loop.run_until_complete(asyncio.gather(*tasks))
def lipid_box(self):
    """The box containing all of the lipids, padded by half sqrt(APL) per axis."""
    if not self._lipid_box:
        box = self.lipid_components.boundingbox
        padding = 0.5 * np.sqrt(self.apl) * np.ones(3)
        box.mins -= padding
        box.maxs += padding
        self._lipid_box = box
    return self._lipid_box
def does_external_program_run(prog, verbose):
    """Test whether an external program can be invoked.

    Returns True when '<prog> -h' can be spawned, False when the binary
    is missing or not executable (OSError).
    """
    try:
        # Bug fix: subprocess.DEVNULL replaces open('/dev/null'), which was
        # opened read-only (invalid as a write target) and is not portable.
        subprocess.call([prog, '-h'], stdout=subprocess.DEVNULL,
                        stderr=subprocess.DEVNULL)
        result = True
    except OSError:
        if verbose > 1:
            print("couldn't run {}".format(prog))
        result = False
    return result
def conf_files(self):
    """Yield every ConfFile attribute defined on this module."""
    for attr_name in dir(self):
        value = getattr(self, attr_name)
        if isinstance(value, ConfFile):
            yield value
def lex(self, text):
    """Tokenize ``text``, yielding (token-name, matched-text) pairs lazily."""
    for match in self.regex.finditer(text):
        group_name = match.lastgroup
        yield group_name, match.group(group_name)
def send(self, message, callback=None):
    """Queue a velbus message (with optional completion callback) for writing."""
    assert isinstance(message, velbus.Message)
    self._write_queue.put_nowait((message, callback))
def spi_write(self, data):
    """Write a stream of bytes to a SPI device, returning the bytes read back."""
    data_out = array.array('B', data)
    # SPI is full duplex: an equal-sized read buffer is filled during the write.
    data_in = array.array('B', (0,) * len(data_out))
    ret = api.py_aa_spi_write(self.handle, len(data_out), data_out,
                              len(data_in), data_in)
    _raise_error_if_negative(ret)
    return bytes(data_in)
def cli(env, identifier, enabled, port, weight, healthcheck_type, ip_address):
    """Edit the properties of a load balancer service group.

    Raises:
        CLIAbort: when no property to change was supplied.
    """
    mgr = SoftLayer.LoadBalancerManager(env.client)
    loadbal_id, service_id = loadbal.parse_id(identifier)
    if ((not any([ip_address, weight, port, healthcheck_type])) and
            enabled is None):
        raise exceptions.CLIAbort(
            'At least one property is required to be changed!')
    ip_address_id = None
    if ip_address:
        # Resolve the dotted IP address to its SoftLayer record id.
        ip_service = env.client['Network_Subnet_IpAddress']
        ip_record = ip_service.getByIpAddress(ip_address)
        ip_address_id = ip_record['id']
    mgr.edit_service(loadbal_id,
                     service_id,
                     ip_address_id=ip_address_id,
                     enabled=enabled,
                     port=port,
                     weight=weight,
                     hc_type=healthcheck_type)
    env.fout('Load balancer service %s is being modified!' % identifier)
def printNetwork(network):
    """Print the given network's regions, grouped by phase.

    Converted from Python 2 print statements (a SyntaxError on Python 3)
    to the print() function; the output content is unchanged.
    """
    print("The network has", len(network.regions.values()), "regions")
    for p in range(network.getMaxPhase()):
        print("=== Phase", p)
        for region in network.regions.values():
            if network.getPhases(region.name)[0] == p:
                print("  ", region.name)
def write_output(self, data, filename=None, args=None):
    """Write log entries to a file, one sorted-key JSON object per line."""
    if args:
        if not args.linejson:
            # Line-JSON output was not requested.
            return 0
        filename = filename or args.linejson
    serialized = [json.dumps(entry, sort_keys=True) for entry in data['entries']]
    with open(str(filename), 'w') as output_file:
        output_file.write('\n'.join(serialized))
def ref_context_from_geoloc(geoloc):
    """Build a RefContext (name plus GEOID db ref) from a geolocation entry."""
    text = geoloc.get('text')
    geoid = geoloc.get('geoID')
    rc = RefContext(name=text, db_refs={'GEOID': geoid})
    return rc
def ms_zoom(self, viewer, event, data_x, data_y, msg=True):
    """Zoom the image by dragging the cursor left or right."""
    if not self.canzoom:
        return True
    msg = self.settings.get('msg_zoom', msg)
    x, y = self.get_win_xy(viewer)
    if event.state == 'move':
        self._zoom_xy(viewer, x, y)
    elif event.state == 'down':
        if msg:
            viewer.onscreen_message("Zoom (drag mouse L-R)",
                                    delay=1.0)
        # Remember the drag origin for relative zooming.
        self._start_x, self._start_y = x, y
    else:
        viewer.onscreen_message(None)
    return True
def deserialize_upload(value, url):
    """Restore file name and storage class from a signed serialized value.

    Returns {'name': None, 'storage': None} when the signature is invalid
    or the storage class cannot be resolved.
    """
    result = {'name': None, 'storage': None}
    try:
        result = signing.loads(value, salt=url)
    except signing.BadSignature:
        pass
    else:
        try:
            result['storage'] = get_storage_class(result['storage'])
        except (ImproperlyConfigured, ImportError):
            result = {'name': None, 'storage': None}
    return result
def add_ppas_from_file(file_name, update=True):
    """Add each personal package archive listed in a file.

    apt sources are updated once at the end rather than per PPA.
    """
    for ppa in _read_lines_from_file(file_name):
        add_ppa(ppa, update=False)
    if update:
        update_apt_sources()
def validate(self):
    """Validate the configuration against the JSON schema, logging each error.

    After logging all errors, validator.validate raises on the first one.
    """
    validator = Draft4Validator(self.SCHEMA)
    if not validator.is_valid(self.config):
        for err in validator.iter_errors(self.config):
            LOGGER.error(str(err.message))
    validator.validate(self.config)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.