code stringlengths 51 2.34k | docstring stringlengths 11 171 |
|---|---|
def create_shn (archive, compression, cmd, verbosity, interactive, filenames):
if len(filenames) > 1:
raise util.PatoolError("multiple filenames for shorten not supported")
cmdlist = [util.shell_quote(cmd)]
cmdlist.extend(['-', util.shell_quote(archive), '<',
util.shell_quote(filenames[0])])
return (cmdlist, {'shell': True}) | Compress a WAV file to a SHN archive. |
def shutdown(self, service_thread_map):
with self._services.lifecycle_lock:
for service, service_thread in service_thread_map.items():
self._logger.info('terminating pantsd service: {}'.format(service))
service.terminate()
service_thread.join(self.JOIN_TIMEOUT_SECONDS)
self._logger.info('terminating pantsd')
self._kill_switch.set() | Gracefully terminate all services and kill the main PantsDaemon loop. |
def rename_key(pki_dir, id_, new_id):
    """Rename a minion key when the instance has been renamed.

    Silently does nothing when the old key is absent.  The rename is
    wrapped in EAFP so a key deleted between the existence check and the
    rename cannot raise (the original isfile()+rename pair was racy).
    """
    oldkey = os.path.join(pki_dir, 'minions', id_)
    newkey = os.path.join(pki_dir, 'minions', new_id)
    if os.path.isfile(oldkey):
        try:
            os.rename(oldkey, newkey)
        except FileNotFoundError:
            # Key vanished after the check: same outcome as "not present".
            pass
def search_people_by_bio(query, limit_results=DEFAULT_LIMIT, index=None):
    """Query the Lucene index for the nearest profile matches.

    Tries an AND query over username/profile_bio first and falls back to
    an OR query when nothing matches.  Returns a list of at most
    ``limit_results`` usernames.

    Fixes vs. original: the mutable default ``index=[...]`` is replaced
    by the None sentinel idiom, and the search size honours
    ``limit_results`` instead of a hard-coded 20.
    """
    from pyes import QueryStringQuery, ES
    if index is None:
        index = ['onename_people_index']
    conn = ES()

    def run_query(operator):
        # Build and run one query; returns (query, results iterator).
        q = QueryStringQuery(query,
                             search_fields=['username', 'profile_bio'],
                             default_operator=operator)
        return q, conn.search(query=q, size=limit_results, indices=index)

    q, results = run_query('and')
    if conn.count(query=q).count == 0:
        # No document matched every term; retry matching any term.
        q, results = run_query('or')

    results_list = []
    for profile in results:
        results_list.append(profile['username'])
        if len(results_list) == limit_results:
            break
    return results_list
def mount(self, url, app):
    """Mount a sub-app at the given url of the current app.

    Records the url on the sub-app and registers it in ``self.mounts``.
    """
    app.url = url
    self.mounts.append(app)
def create_topic(self, topic_name, topic_config):
topic_subs = []
t = self.template
if "Subscription" in topic_config:
topic_subs = topic_config["Subscription"]
t.add_resource(
sns.Topic.from_dict(
topic_name,
topic_config
)
)
topic_arn = Ref(topic_name)
t.add_output(
Output(topic_name + "Name", Value=GetAtt(topic_name, "TopicName"))
)
t.add_output(Output(topic_name + "Arn", Value=topic_arn))
sqs_subs = [sub for sub in topic_subs if sub["Protocol"] == "sqs"]
if sqs_subs:
self.create_sqs_policy(topic_name, topic_arn, sqs_subs) | Creates the SNS topic, along with any subscriptions requested. |
def global_get(self, key):
    """Return the value stored for ``key`` in the ``globals`` table.

    Raises KeyError when the key has never been set.
    """
    row = self.sql('global_get', self.pack(key)).fetchone()
    if row is None:
        raise KeyError("Not set")
    return self.unpack(row[0])
def encode_token(self, token):
key = current_app.secret_key
if key is None:
raise RuntimeError(
"please set app.secret_key before generate token")
return jwt.encode(token, key, algorithm=self.config["algorithm"]) | Encode Authorization token, return bytes token |
def directory(self):
    """Directory that holds this file (fetched lazily, then cached)."""
    cached = self._directory
    if cached is None:
        # First access: load through the API and memoize.
        cached = self.api._load_directory(self.cid)
        self._directory = cached
    return cached
def format_vk(vk):
for ext in get_extensions_filtered(vk):
req = ext['require']
if not isinstance(req, list):
ext['require'] = [req] | Format vk before using it |
def deploy(remote, assets_to_s3):
header("Deploying...")
if assets_to_s3:
for mod in get_deploy_assets2s3_list(CWD):
_assets2s3(mod)
remote_name = remote or "ALL"
print("Pushing application's content to remote: %s " % remote_name)
hosts = get_deploy_hosts_list(CWD, remote or None)
git_push_to_master(cwd=CWD, hosts=hosts, name=remote_name)
print("Done!") | To DEPLOY your application |
def align(self, inputwords, outputwords):
    """For each input word, give the index of the matching output word.

    Walks the output with a cursor, tolerating a single inserted output
    word per step; input words with no match map to None.
    """
    indices = []
    pos = 0
    for word in inputwords:
        if pos < len(outputwords) and outputwords[pos] == word:
            indices.append(pos)
            pos += 1
        elif pos + 1 < len(outputwords) and outputwords[pos + 1] == word:
            # One extra output word was inserted; skip over it.
            indices.append(pos + 1)
            pos += 2
        else:
            # No match at or just past the cursor.
            indices.append(None)
            pos += 1
    return indices
def _persisted_last_epoch(self) -> int:
epoch_number = 0
self._make_sure_dir_exists()
for x in os.listdir(self.model_config.checkpoint_dir()):
match = re.match('checkpoint_(\\d+)\\.data', x)
if match:
idx = int(match[1])
if idx > epoch_number:
epoch_number = idx
return epoch_number | Return number of last epoch already calculated |
def setup(app):
app.connect("builder-inited", build_configuration_parameters)
app.connect("autodoc-skip-member", skip_slots)
app.add_stylesheet("css/custom.css") | Map methods to states of the documentation build. |
def save_hdf(self, filename, path='', overwrite=False):
if os.path.exists(filename) and overwrite:
os.remove(filename)
for pop in self.poplist:
name = pop.modelshort
pop.save_hdf(filename, path='{}/{}'.format(path,name), append=True) | Saves PopulationSet to HDF file. |
def render_flatpage(request, f):
if f.registration_required and not request.user.is_authenticated():
from django.contrib.auth.views import redirect_to_login
return redirect_to_login(request.path)
if f.template_name:
t = loader.select_template((f.template_name, DEFAULT_TEMPLATE))
else:
t = loader.get_template(DEFAULT_TEMPLATE)
f.title = mark_safe(f.title)
f.content = mark_safe(f.content)
response = HttpResponse(t.render({
'flatpage': f
}, request))
try:
from django.core.xheaders import populate_xheaders
populate_xheaders(request, response, FlatPage_i18n, f.id)
except ImportError:
pass
return response | Internal interface to the flat page view. |
def addAnnotationsSearchOptions(parser):
addAnnotationSetIdArgument(parser)
addReferenceNameArgument(parser)
addReferenceIdArgument(parser)
addStartArgument(parser)
addEndArgument(parser)
addEffectsArgument(parser)
addPageSizeArgument(parser) | Adds common options to a annotation searches command line parser. |
def step_an_empty_file_named_filename(context, filename):
assert not os.path.isabs(filename)
command_util.ensure_workdir_exists(context)
filename2 = os.path.join(context.workdir, filename)
pathutil.create_textfile_with_contents(filename2, "") | Creates an empty file. |
def open_dataset(self, service):
if not self.dataset:
path = os.path.join(SERVICE_DATA_ROOT, service.data_path)
self.dataset = netCDF4.Dataset(path, 'r')
return self.dataset | Opens and returns the NetCDF dataset associated with a service, or returns a previously-opened dataset |
def create_exclude_rules(args):
global _cached_exclude_rules
if _cached_exclude_rules is not None:
return _cached_exclude_rules
rules = []
for excl_path in args.exclude:
abspath = os.path.abspath(os.path.join(args.root, excl_path))
rules.append((abspath, True))
for incl_path in args.include:
abspath = os.path.abspath(os.path.join(args.root, incl_path))
rules.append((abspath, False))
_cached_exclude_rules = sorted(rules, key=lambda p: p[0])
return _cached_exclude_rules | Creates the exclude rules |
def getTextTitle(self):
    """Return a title for texts and listings.

    Empty string when there is no request ID; the request ID alone when
    there is no analysis; otherwise "<request id> - <analysis title>".
    """
    request_id = self.getRequestID()
    if not request_id:
        return ""
    analysis = self.getAnalysis()
    if analysis:
        return "%s - %s" % (request_id, analysis.Title())
    return request_id
def draw_variables(self):
    """Draw ``self.sims`` samples from each approximating distribution.

    Rows are stacked in the order of ``self.q`` (one row per latent
    variable); a single distribution returns its draw unstacked, exactly
    as before.
    """
    samples = self.q[0].draw_variable_local(self.sims)
    for dist in self.q[1:]:
        samples = np.vstack((samples, dist.draw_variable_local(self.sims)))
    return samples
def ReadAllFlowRequestsAndResponses(self, client_id, flow_id):
    """Read all requests and responses for a given flow from the database.

    Returns a list of (request, responses-dict) pairs ordered by request
    id, or an empty list for an unknown flow.
    """
    flow_key = (client_id, flow_id)
    if flow_key not in self.flows:
        return []
    requests = self.flow_requests.get(flow_key, {})
    responses = self.flow_responses.get(flow_key, {})
    return [(requests[rid], responses.get(rid, {}))
            for rid in sorted(requests)]
def repo(name: str, owner: str) -> snug.Query[dict]:
request = snug.GET(f'https://api.github.com/repos/{owner}/{name}')
response = yield request
return json.loads(response.content) | a repo lookup by owner and name |
def save_sequence_rule(self, sequence_rule_form, *args, **kwargs):
    """Pass through to the provider session.

    Routes the form to ``update_sequence_rule`` when it is flagged for
    update, otherwise to ``create_sequence_rule``.
    """
    if not sequence_rule_form.is_for_update():
        return self.create_sequence_rule(sequence_rule_form, *args, **kwargs)
    return self.update_sequence_rule(sequence_rule_form, *args, **kwargs)
def _event(self, event):
result = dict(
pid=event.device_id,
tid=event.resource_id,
name=event.name,
ts=event.timestamp_ps / 1000000.0)
if event.duration_ps:
result['ph'] = _TYPE_COMPLETE
result['dur'] = event.duration_ps / 1000000.0
else:
result['ph'] = _TYPE_INSTANT
result['s'] = _SCOPE_THREAD
for key in dict(event.args):
if 'args' not in result:
result['args'] = {}
result['args'][key] = event.args[key]
return result | Converts a TraceEvent proto into a catapult trace event python value. |
def cfnumber_to_number(cfnumber):
numeric_type = cf.CFNumberGetType(cfnumber)
cfnum_to_ctype = {kCFNumberSInt8Type: c_int8, kCFNumberSInt16Type: c_int16,
kCFNumberSInt32Type: c_int32,
kCFNumberSInt64Type: c_int64,
kCFNumberFloat32Type: c_float,
kCFNumberFloat64Type: c_double,
kCFNumberCharType: c_byte, kCFNumberShortType: c_short,
kCFNumberIntType: c_int, kCFNumberLongType: c_long,
kCFNumberLongLongType: c_longlong,
kCFNumberFloatType: c_float,
kCFNumberDoubleType: c_double,
kCFNumberCFIndexType: CFIndex,
kCFNumberCGFloatType: CGFloat}
if numeric_type in cfnum_to_ctype:
t = cfnum_to_ctype[numeric_type]
result = t()
if cf.CFNumberGetValue(cfnumber, numeric_type, byref(result)):
return result.value
else:
raise Exception(
'cfnumber_to_number: unhandled CFNumber type %d' % numeric_type) | Convert CFNumber to python int or float. |
def run(self):
self.info_log("The test batch is ready.")
self.executed_tests = []
for test in self.tests:
localhost_instance = LocalhostInstance(
runner=self,
browser_config=self.browser_config,
test_name=test.Test.name
)
localhost_instance.startup()
with DbSessionContext(BROME_CONFIG['database']['mongo_database_name']) as session:
test_batch = session.query(Testbatch)\
.filter(Testbatch.mongo_id == self.test_batch_id).one()
test_batch.total_executing_tests = 1
session.save(test_batch, safe=True)
test_ = test.Test(
runner=self,
browser_config=self.browser_config,
name=test.Test.name,
test_batch_id=self.test_batch_id,
localhost_instance=localhost_instance,
index=1
)
test_.execute()
self.executed_tests.append(test_)
localhost_instance.tear_down() | Run the test batch |
def returns_json(func):
    """Decorator that serializes the wrapped function's return value to
    JSON and sets the response Content-Type to application/json.

    The first positional argument is expected to be the view instance
    carrying the request.
    """
    def wrapper(*args, **kwargs):
        view = args[0]
        request = getattr(view, 'request', None)
        request.response.setHeader("Content-Type", "application/json")
        return json.dumps(func(*args, **kwargs))
    return wrapper
def _reset_i(self, i):
self.count[i].value=0
log.debug("reset counter %s", i)
self.lock[i].acquire()
for x in range(self.q[i].qsize()):
self.q[i].get()
self.lock[i].release()
self.start_time[i].value = time.time() | reset i-th progress information |
def collect_github_config():
github_config = {}
for field in ["user", "token"]:
try:
github_config[field] = subprocess.check_output(["git", "config", "github.{}".format(field)]).decode('utf-8').strip()
except (OSError, subprocess.CalledProcessError):
pass
return github_config | Try load Github configuration such as usernames from the local or global git config |
def create_groups(orientations, *groups, **kwargs):
grouped = []
if kwargs.pop('copy', True):
orientations = [copy(o) for o in orientations]
for o in orientations:
o.member_of = None
try:
grouped += o.members
for a in o.members:
a.member_of = o
except AttributeError:
pass
def find(uid):
try:
val = next(x for x in orientations if x.hash == uid)
if val in grouped:
raise GroupedPlaneError("{} is already in a group."
.format(val.hash))
return val
except StopIteration:
raise KeyError("No measurement of with hash {} found"
.format(uid))
for uid_list in groups:
vals = [find(uid) for uid in uid_list]
o = GroupedOrientation(*vals, **kwargs)
orientations.append(o)
return orientations | Create groups of an orientation measurement dataset |
# Ensure VLC is not left muted when playback stops / the player exits.
# NOTE(review): shutdown path; talks to a VLC instance via _sendCommand.
def _no_mute_on_stop_playback(self):
# User interrupt: skip all cleanup commands.
if self.ctrl_c_pressed:
return
if self.isPlaying():
# -1 marks "volume unknown"; ask VLC for the current value first.
if self.actual_volume == -1:
self._get_volume()
# HACK: busy-wait (no sleep, no timeout) until the volume reply
# lands in self.actual_volume — presumably set by a reader
# thread; spins a core and hangs forever if no reply. Confirm.
while self.actual_volume == -1:
pass
if self.actual_volume == 0:
# Muted at 0: restore to an audible 25% of max volume.
self.actual_volume = int(self.max_volume*0.25)
self._sendCommand('volume {}\n'.format(self.actual_volume))
if logger.isEnabledFor(logging.DEBUG):
logger.debug('Unmuting VLC on exit: {} (25%)'.format(self.actual_volume))
elif self.muted:
# Not playing but muted: push the last known volume back to VLC.
if self.actual_volume > 0:
self._sendCommand('volume {}\n'.format(self.actual_volume))
if logger.isEnabledFor(logging.DEBUG):
logger.debug('VLC volume restored on exit: {0} ({1}%)'.format(self.actual_volume, int(100 * self.actual_volume / self.max_volume)))
self.show_volume = True | make sure vlc does not stop muted |
def _convert_listlike(arg, unit='ns', box=True, errors='raise', name=None):
if isinstance(arg, (list, tuple)) or not hasattr(arg, 'dtype'):
arg = np.array(list(arg), dtype=object)
try:
value = sequence_to_td64ns(arg, unit=unit,
errors=errors, copy=False)[0]
except ValueError:
if errors == 'ignore':
return arg
else:
raise
if box:
from pandas import TimedeltaIndex
value = TimedeltaIndex(value, unit='ns', name=name)
return value | Convert a list of objects to a timedelta index object. |
def _get_subparser_cell_args(self, subparser_prog):
subparsers = self._get_subparsers()
for subparser in subparsers:
if subparser_prog == subparser.prog:
return subparser._cell_args
return None | Get cell args of a specified subparser by its prog. |
def __connect():
global redis_instance
if use_tcp_socket:
redis_instance = redis.StrictRedis(host=hostname, port=port)
else:
redis_instance = redis.StrictRedis(unix_socket_path=unix_socket) | Connect to a redis instance. |
def generate_seed(seed):
    """Seed the global random number generator and return the seed used.

    When ``seed`` is None, a fresh seed is drawn after reseeding from the
    OS entropy source.
    """
    chosen = seed
    if chosen is None:
        random.seed()
        chosen = random.randint(0, sys.maxsize)
    random.seed(a=chosen)
    return chosen
def _stop_cpulimit(self):
if self._cpulimit_process and self._cpulimit_process.returncode is None:
self._cpulimit_process.kill()
try:
self._process.wait(3)
except subprocess.TimeoutExpired:
log.error("Could not kill cpulimit process {}".format(self._cpulimit_process.pid)) | Stops the cpulimit process. |
def fail(p_queue, host=None):
if host is not None:
return _path(_c.FSQ_FAIL, root=_path(host, root=hosts(p_queue)))
return _path(p_queue, _c.FSQ_FAIL) | Construct a path to the fail dir for a queue |
def internal2external(xi, bounds):
    """Convert a series of internal (unconstrained) variables to external
    (bounded) variables.

    Parameters
    ----------
    xi : sequence of float
        Internal parameter values.
    bounds : sequence of (lower, upper)
        Per-parameter bounds; either side may be None for "unbounded"
        (original compared with ``== None``; ``is None`` is the correct
        identity test).

    Returns
    -------
    numpy.ndarray
        External values, same length as ``xi``.
    """
    xe = np.empty_like(xi)
    for i, (v, (lower, upper)) in enumerate(zip(xi, bounds)):
        if lower is None and upper is None:
            # Unconstrained: pass through unchanged.
            xe[i] = v
        elif upper is None:
            # Lower bound only: maps R onto [lower, inf).
            xe[i] = lower - 1. + np.sqrt(v ** 2. + 1.)
        elif lower is None:
            # Upper bound only: maps R onto (-inf, upper].
            xe[i] = upper + 1. - np.sqrt(v ** 2. + 1.)
        else:
            # Both bounds: sine transform onto [lower, upper].
            xe[i] = lower + ((upper - lower) / 2.) * (np.sin(v) + 1.)
    return xe
def log_request(self, code="-", size="-"):
    """Log the handled request line and status code at DEBUG level.

    ``size`` is accepted for BaseHTTPRequestHandler-style compatibility
    but is not logged.
    """
    message = '"%s" %s'
    self._service.log(logging.DEBUG, message, self.requestline, code)
def _results(self, connection, msgid):
try:
kind, results = connection.result(msgid)
if kind != ldap.RES_SEARCH_RESULT:
results = []
except ldap.LDAPError as e:
results = []
logger.error(u"result(%d) raised %s" % (msgid, pprint.pformat(e)))
return self._process_results(results) | Returns the result of a previous asynchronous query. |
def load(self):
try:
merged_configfile = self.get_merged_config()
self.yamldocs = yaml.load(merged_configfile, Loader=Loader)
self.yamldocs = [x for x in self.yamldocs if x]
self.logdebug('parsed_rules:\n%s\n' % pretty(self.yamldocs))
except (yaml.scanner.ScannerError, yaml.parser.ParserError):
self.raise_and_log_error(ConfigError, 'error parsing config.') | Load our config, log and raise on error. |
def makeicons(source):
im = Image.open(source)
for name, (_, w, h, func) in icon_sizes.iteritems():
print('Making icon %s...' % name)
tn = func(im, (w, h))
bg = Image.new('RGBA', (w, h), (255, 255, 255))
x = (w / 2) - (tn.size[0] / 2)
y = (h / 2) - (tn.size[1] / 2)
bg.paste(tn, (x, y))
bg.save(path.join(env.dir, name)) | Create all the neccessary icons from source image |
def _format_lat(self, lat):
if self.ppd in [4, 16, 64, 128]:
return None
else:
if lat < 0:
return map(lambda x: "{0:0>2}"
.format(int(np.abs(x))) + 'S', self._map_center('lat', lat))
else:
return map(lambda x: "{0:0>2}"
.format(int(x)) + 'N', self._map_center('lat', lat)) | Return a formatted latitude string for the file |
async def _cancel_payloads(self):
for task in self._tasks:
task.cancel()
await asyncio.sleep(0)
for task in self._tasks:
while not task.done():
await asyncio.sleep(0.1)
task.cancel() | Cancel all remaining payloads |
def format(self, record):
data = record.__dict__.copy()
data['data_id'] = DATA['id']
data['data_location_id'] = DATA_LOCATION['id']
data['hostname'] = socket.gethostname()
data['pathname'] = os.path.relpath(data['pathname'], os.path.dirname(__file__))
data['exc_info'] = None
data['msg'] = str(data['msg'])
return json.dumps(data) | Dump the record to JSON. |
def clear(self):
self.command(c.LCD_CLEARDISPLAY)
self._cursor_pos = (0, 0)
self._content = [[0x20] * self.lcd.cols for _ in range(self.lcd.rows)]
c.msleep(2) | Overwrite display with blank characters and reset cursor position. |
def _merge_list_fastqs(files, out_file, config):
if not all(map(fastq.is_fastq, files)):
raise ValueError("Not all of the files to merge are fastq files: %s " % (files))
assert all(map(utils.file_exists, files)), ("Not all of the files to merge "
"exist: %s" % (files))
if not file_exists(out_file):
files = [_gzip_fastq(fn) for fn in files]
if len(files) == 1:
if "remove_source" in config and config["remove_source"]:
shutil.move(files[0], out_file)
else:
os.symlink(files[0], out_file)
return out_file
with file_transaction(out_file) as file_txt_out:
files_str = " ".join(list(files))
cmd = "cat {files_str} > {file_txt_out}".format(**locals())
do.run(cmd, "merge fastq files %s" % files)
return out_file | merge list of fastq files into one |
def _log_request(request):
logger.debug("Inbound email received")
for k, v in list(request.POST.items()):
logger.debug("- POST['%s']='%s'" % (k, v))
for n, f in list(request.FILES.items()):
logger.debug("- FILES['%s']: '%s', %sB", n, f.content_type, f.size) | Helper function to dump out debug info. |
# Merge the freshly downloaded file ``n`` into the file it shadows
# (``n`` minus its 4-character suffix), line by line: keep the old line
# when both match, take the new line when they differ, padding the
# shorter file with empty lines.
# NOTE(review): itertools.izip_longest exists only on Python 2
# (zip_longest on Python 3) — this module presumably targets Python 2.
# NOTE(review): if the old file is missing, ``old`` is never bound and a
# later reference would raise NameError — confirm callers guarantee it.
def merge(self, n):
if os.path.isfile(n[:-4]):
old = Utils().read_file(n[:-4]).splitlines()
if os.path.isfile(n):
new = Utils().read_file(n).splitlines()
# Rewrite the old file in place with the merged content.
with open(n[:-4], "w") as out:
for l1, l2 in itertools.izip_longest(old, new):
if l1 is None:
l1 = ""
if l2 is None:
l2 = ""
if l1 != l2:
out.write(l2 + "\n")
else:
out.write(l1 + "\n")
print("The file {0} merged in file {1}".format(
n.split("/")[-1], n[:-4].split("/")[-1])) | Merge new file into old |
def wrtxt_hier(self, fout_txt):
with open(fout_txt, 'wb') as prt:
self.prt_hier(prt)
print(" WROTE: {TXT}".format(TXT=fout_txt)) | Write hierarchy below specfied GO IDs to an ASCII file. |
def log_prior(self):
    """Log prior of the current parameters.

    Returns 0.0 when every parameter lies inside its (lower, upper)
    bound and -inf as soon as any parameter falls outside; None on
    either side means unbounded.
    """
    for value, (lower, upper) in zip(self.parameter_vector, self.parameter_bounds):
        below = lower is not None and value < lower
        above = upper is not None and value > upper
        if below or above:
            return -np.inf
    return 0.0
def element(self):
element = self.keywords["VRHFIN"].split(":")[0].strip()
try:
return Element(element).symbol
except ValueError:
if element == "X":
return "Xe"
return Element(self.symbol.split("_")[0]).symbol | Attempt to return the atomic symbol based on the VRHFIN keyword. |
def register_plugin_module(mod):
for k, v in load_plugins_from_module(mod).items():
if k:
if isinstance(k, (list, tuple)):
k = k[0]
global_registry[k] = v | Find plugins in given module |
def center_origin(self):
self.set_origin(Vector2(self.image.get_width() / 2.0, self.image.get_height() / 2.0)) | Sets the origin to the center of the image. |
def assign_tip_labels_and_colors(self):
"assign tip labels based on user provided kwargs"
if self.style.tip_labels_colors:
if self.ttree._fixed_order:
if isinstance(self.style.tip_labels_colors, (list, np.ndarray)):
cols = np.array(self.style.tip_labels_colors)
orde = cols[self.ttree._fixed_idx]
self.style.tip_labels_colors = list(orde)
if self.style.tip_labels is False:
self.style.tip_labels_style["-toyplot-anchor-shift"] = "0px"
self.tip_labels = ["" for i in self.ttree.get_tip_labels()]
else:
if not self.style.tip_labels_style["-toyplot-anchor-shift"]:
self.style.tip_labels_style["-toyplot-anchor-shift"] = "15px"
if isinstance(self.style.tip_labels, list):
self.tip_labels = self.style.tip_labels
else:
if self.ttree._fixed_order:
self.tip_labels = self.ttree._fixed_order
else:
self.tip_labels = self.ttree.get_tip_labels() | assign tip labels based on user provided kwargs |
def update_firewall_rule(self, firewall_rule, body=None):
return self.put(self.firewall_rule_path % (firewall_rule), body=body) | Updates a firewall rule. |
def register(self):
group = cfg.OptGroup(
self.group_name,
title="HNV (Hyper-V Network Virtualization) Options")
self._config.register_group(group)
self._config.register_opts(self._options, group=group) | Register the current options to the global ConfigOpts object. |
def combine_keys(pks: Iterable[Ed25519PublicPoint]) -> Ed25519PublicPoint:
P = [_ed25519.decodepoint(pk) for pk in pks]
combine = reduce(_ed25519.edwards_add, P)
return Ed25519PublicPoint(_ed25519.encodepoint(combine)) | Combine a list of Ed25519 points into a "global" CoSi key. |
def _expand_data(self, old_data, new_data, group):
for file in old_data:
if file:
extension = file.split(".")[-1].lower()
if extension in self.file_types.keys():
new_data['groups'][group].append(self._expand_one_file(normpath(file),
new_data, extension))
else:
logger.debug("Filetype for file %s not recognized" % file)
if hasattr(self, '_expand_sort_key'):
new_data['groups'][group] = sorted(new_data['groups'][group],
key=self._expand_sort_key) | data expansion - uvision needs filename and path separately. |
def var(self):
if self._var is None:
self._var = symbol.var(self.name, shape=self.shape, dtype=self.dtype,
lr_mult=self.lr_mult, wd_mult=self.wd_mult,
init=self.init, stype=self._stype)
return self._var | Returns a symbol representing this parameter. |
def geom_wh(geom):
    """Compute the (width, height) of a geometry in projected units.

    OGR's ``GetEnvelope`` returns ``(minX, maxX, minY, maxY)``, so width
    is the X extent and height the Y extent; the original assigned the
    two axes to swapped variable names and returned (height, width).
    """
    min_x, max_x, min_y, max_y = geom.GetEnvelope()
    return max_x - min_x, max_y - min_y
def search_mosaics(name, bbox, rbox, limit, pretty):
bbox = bbox or rbox
cl = clientv1()
mosaic, = cl.get_mosaic_by_name(name).items_iter(1)
response = call_and_wrap(cl.get_quads, mosaic, bbox)
echo_json_response(response, pretty, limit) | Get quad IDs and information for a mosaic |
def _create_fw(self, tenant_id, data):
LOG.debug("In creating Native FW data is %s", data)
ret, in_sub, out_sub = self.attach_intf_router(tenant_id,
data.get('tenant_name'),
data.get('router_id'))
if not ret:
LOG.error("Native FW: Attach intf router failed for tenant "
"%s", tenant_id)
return False
self.create_tenant_dict(tenant_id, data.get('router_id'))
arg_dict = self._create_arg_dict(tenant_id, data, in_sub, out_sub)
ret = self.update_dcnm_partition_static_route(tenant_id, arg_dict)
if not ret:
return False
ret = self.program_default_gw(tenant_id, arg_dict)
if not ret:
return False
ret = self.program_next_hop(tenant_id, arg_dict)
if not ret:
return False
ret = self.send_in_router_port_msg(tenant_id, arg_dict, 'up')
if not ret:
return False
return self.send_out_router_port_msg(tenant_id, arg_dict, 'up') | Internal routine that gets called when a FW is created. |
def update_rating(self, postid):
post_data = self.get_post_data()
rating = float(post_data['rating'])
postinfo = MPost.get_by_uid(postid)
if postinfo and self.userinfo:
MRating.update(postinfo.uid, self.userinfo.uid, rating=rating)
self.update_post(postid)
else:
return False | Only a user who is logged in may vote. |
def inner(self, isolated=False):
if isolated:
return Frame(self.eval_ctx, level=self.symbols.level + 1)
return Frame(self.eval_ctx, self) | Return an inner frame. |
def client_list(self, *args):
    """Display the list of connected clients, or a notice when empty."""
    if self._clients:
        self.log(self._clients, pretty=True)
    else:
        self.log('No clients connected')
def _maybe_dt_array(array):
if not isinstance(array, DataTable) or array is None:
return array
if array.shape[1] > 1:
raise ValueError('DataTable for label or weight cannot have multiple columns')
array = array.to_numpy()[:, 0].astype('float')
return array | Extract numpy array from single column data table |
def route_request(self, request_json, metadata=None):
request = Request(request_json)
request.metadata = metadata
handler_fn = self._handlers[self._default]
if not request.is_intent() and (request.request_type() in self._handlers):
handler_fn = self._handlers[request.request_type()]
elif request.is_intent() and request.intent_name() in self._handlers['IntentRequest']:
handler_fn = self._handlers['IntentRequest'][request.intent_name()]
response = handler_fn(request)
response.set_session(request.session)
return response.to_json() | Route the request object to the right handler function |
def tensor_size_guidance_from_flags(flags):
tensor_size_guidance = dict(DEFAULT_TENSOR_SIZE_GUIDANCE)
if not flags or not flags.samples_per_plugin:
return tensor_size_guidance
for token in flags.samples_per_plugin.split(','):
k, v = token.strip().split('=')
tensor_size_guidance[k] = int(v)
return tensor_size_guidance | Apply user per-summary size guidance overrides. |
def sdk_version(self):
    """Return (and cache) the SDK version of the connected device.

    Returns 0 when the property cannot be read or parsed.
    """
    if self.__sdk == 0:
        try:
            out = self.adb.cmd("shell", "getprop", "ro.build.version.sdk").communicate()[0]
            self.__sdk = int(out.decode("utf-8").strip())
        except Exception:
            # Leave the cached value at 0. A bare ``except:`` (as in the
            # original) would also swallow KeyboardInterrupt/SystemExit.
            pass
    return self.__sdk
def split_bel_stmt(stmt: str, line_num) -> tuple:
    """Split a BEL statement into a (subject, relation, object) tuple.

    Returns ``(stmt, None, None)`` and logs a message when the statement
    cannot be parsed.  The pattern is now a raw string: the original used
    a plain f-string with no interpolation, so ``\\)``, ``\\s``, ``\\w``
    were invalid escape sequences (DeprecationWarning, a future error).
    """
    m = re.match(r"^(.*?\))\s+([a-zA-Z=\->\|:]+)\s+([\w(]+.*?)$", stmt, flags=0)
    if m:
        return (m.group(1), m.group(2), m.group(3))
    log.info(
        f"Could not parse bel statement into components at line number: {line_num} assertion: {stmt}"
    )
    return (stmt, None, None)
def render(self):
context = self.context
if 'app' not in context:
context['app'] = self.application.name
temp_dir = self.temp_dir
templates_root = self.blueprint.templates_directory
for root, dirs, files in os.walk(templates_root):
for directory in dirs:
directory = os.path.join(root, directory)
directory = render_from_string(directory, context)
directory = directory.replace(templates_root, temp_dir, 1)
os.mkdir(directory)
for file in files:
full_file = os.path.join(root, file)
stat = os.stat(full_file)
content = render_from_file(full_file, context)
full_file = strip_extension(
render_from_string(full_file, context))
full_file = full_file.replace(templates_root, temp_dir, 1)
with open(full_file, 'w') as f:
f.write(content)
os.chmod(full_file, stat.st_mode) | Render the blueprint into a temp directory using the context. |
def list_corpus_files(dotted_path):
corpus_path = get_file_path(dotted_path, extension=CORPUS_EXTENSION)
paths = []
if os.path.isdir(corpus_path):
paths = glob.glob(corpus_path + '/**/*.' + CORPUS_EXTENSION, recursive=True)
else:
paths.append(corpus_path)
paths.sort()
return paths | Return a list of file paths to each data file in the specified corpus. |
def hilbert(ts):
output = signal.hilbert(signal.detrend(ts, axis=0), axis=0)
return Timeseries(output, ts.tspan, labels=ts.labels) | Analytic signal, using the Hilbert transform |
def compare_md5(self):
    """Compare the MD5 of the source file with its transferred copy.

    Direction "put" checks against the remote device, "get" against the
    local destination file; any other direction yields None.
    """
    if self.direction == "put":
        return self.source_md5 == self.remote_md5()
    if self.direction == "get":
        return self.source_md5 == self.file_md5(self.dest_file)
def find_all(self, prefix):
prefix = ip_network(prefix)
if not self.prefix.overlaps(prefix) \
or self.prefix[0] > prefix[0] \
or self.prefix[-1] < prefix[-1]:
raise NotAuthoritativeError('This node is not authoritative for %r'
% prefix)
matches = set()
for child in self.children:
if prefix.overlaps(child.prefix):
matches.add(child)
return matches | Find everything in the given prefix |
def list_items(queue):
itemstuple = _list_items(queue)
items = [item[0] for item in itemstuple]
return items | List contents of a queue |
def validate(options):
    """Validate the backend configuration for this metadata backend.

    Requires the 'modelinstance' backend to be installed and ordered
    before the 'model' backend; raises Exception otherwise.
    """
    try:
        mi_pos = options.backends.index('modelinstance')
        m_pos = options.backends.index('model')
    except ValueError:
        raise Exception("Metadata backend 'modelinstance' must be installed in order to use 'model' backend")
    if mi_pos > m_pos:
        raise Exception("Metadata backend 'modelinstance' must come before 'model' backend")
def create(self, data, **kwargs):
self.client.post(self.url, data=data) | Create classifitions for specific entity |
def baseclass(self):
for cls in _BASE_CLASSES:
if isinstance(self, cls):
return cls
raise ValueError("Cannot determine the base class of %s" % self.__class__.__name__) | The baseclass of self. |
def _find_vm(name, data, quiet=False):
for hv_ in data:
if not isinstance(data[hv_], dict):
continue
if name in data[hv_].get('vm_info', {}):
ret = {hv_: {name: data[hv_]['vm_info'][name]}}
if not quiet:
__jid_event__.fire_event({'data': ret, 'outputter': 'nested'}, 'progress')
return ret
return {} | Scan the query data for the named VM |
def filter_data(data, filter_dict):
    """Filter ``data`` in place, keeping only entries matching each pattern.

    ``filter_dict`` maps top-level keys to regex strings.  List values
    keep matching items; dict values keep entries whose *key* matches.
    Unknown top-level keys are logged and skipped; any other value type
    raises MiuraException.
    """
    for key, pattern in filter_dict.items():
        if key not in data:
            logger.warning("{0} doesn't match a top level key".format(key))
            continue
        match = re.compile(pattern).search
        current = data[key]
        if isinstance(current, list):
            data[key] = [item for item in current if match(item)]
        elif isinstance(current, dict):
            data[key] = {k: v for k, v in current.items() if match(k)}
        else:
            raise MiuraException("cannot filter a {0}".format(type(current)))
def _truncate(self, x, k):
not_F = np.argsort(np.abs(x))[:-k]
x[not_F] = 0
return x | given a vector x, leave its top-k absolute-value entries alone, and set the rest to 0 |
def imagej_metadata(self):
if not self.is_imagej:
return None
page = self.pages[0]
result = imagej_description_metadata(page.is_imagej)
if 'IJMetadata' in page.tags:
try:
result.update(page.tags['IJMetadata'].value)
except Exception:
pass
return result | Return consolidated ImageJ metadata as dict. |
def notifications(self):
self.__init()
items = []
for n in self._notifications:
if "id" in n:
url = "%s/%s" % (self.root, n['id'])
items.append(self.Notification(url=url,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port))
return items | gets the user's notifications |
def draw(self, surfaceObj):
if self._visible:
if self.buttonDown:
surfaceObj.blit(self.surfaceDown, self._rect)
elif self.mouseOverButton:
surfaceObj.blit(self.surfaceHighlight, self._rect)
else:
surfaceObj.blit(self.surfaceNormal, self._rect) | Blit the current button's appearance to the surface object. |
def pack(self):
return self.key + self.uid.ljust(pyhsm.defines.UID_SIZE, chr(0)) | Return key and uid packed for sending in a command to the YubiHSM. |
def parse_stream(stream):
code = []
for (line, col, (token, value)) in Tokenizer(stream).tokenize():
if token == Tokenizer.STRING:
value = '"' + value + '"'
code.append(value)
return code | Parse a Forth-like language and return code. |
def clean_resource_json(resource_json):
    """Shrink a resource dict for the catalog by dropping bulky fields.

    Removes a fixed set of structural keys at the top level and of
    presentation keys under ``props``; mutates and returns the same dict.
    """
    for key in ('parent_docname', 'parent', 'template', 'repr', 'series'):
        resource_json.pop(key, None)
    props = resource_json['props']
    for key in ('acquireds', 'style', 'in_nav', 'nav_title', 'weight',
                'auto_excerpt'):
        props.pop(key, None)
    return resource_json
def dfs_do_func_on_graph(node, func, *args, **kwargs):
    """Invoke ``func(n, *args, **kwargs)`` on every node produced by the
    node's tree iterator."""
    for current in node.tree_iterator():
        func(current, *args, **kwargs)
def unhook_wnd_proc(self):
if not self.__local_wnd_proc_wrapped:
return
SetWindowLong(self.__local_win_handle,
GWL_WNDPROC,
self.__old_wnd_proc)
self.__local_wnd_proc_wrapped = None | Restore previous Window message handler |
def set(self, varname, value, idx=0, units=None):
if not varname in self.mapping.vars:
raise fgFDMError('Unknown variable %s' % varname)
if idx >= self.mapping.vars[varname].arraylength:
raise fgFDMError('index of %s beyond end of array idx=%u arraylength=%u' % (
varname, idx, self.mapping.vars[varname].arraylength))
if units:
value = self.convert(value, units, self.mapping.vars[varname].units)
if math.isinf(value) or math.isnan(value) or math.fabs(value) > 3.4e38:
value = 0
self.values[self.mapping.vars[varname].index + idx] = value | set a variable value |
def cublasSgbmv(handle, trans, m, n, kl, ku, alpha, A, lda,
x, incx, beta, y, incy):
status = _libcublas.cublasSgbmv_v2(handle,
trans, m, n, kl, ku,
ctypes.byref(ctypes.c_float(alpha)),
int(A), lda,
int(x), incx,
ctypes.byref(ctypes.c_float(beta)),
int(y), incy)
cublasCheckStatus(status) | Matrix-vector product for real general banded matrix. |
def eval_file(file):
'evaluate file content as expressions'
fname = os.path.realpath(os.path.expanduser(file))
with open(fname) as f:
inscript = f.read()
sh = run_write_read(['plash', 'eval'], inscript.encode()).decode()
if sh.endswith('\n'):
return sh[:-1]
return sh | evaluate file content as expressions |
def to_user_agent(self):
    """Return the user-agent string for this client info.

    Optional components (user agent, grpc, gapic, client library) appear
    only when their attribute is set; the python and api-core versions
    are always included.
    """
    parts = []
    if self.user_agent is not None:
        parts.append("{user_agent}")
    parts.append("gl-python/{python_version}")
    if self.grpc_version is not None:
        parts.append("grpc/{grpc_version}")
    parts.append("gax/{api_core_version}")
    if self.gapic_version is not None:
        parts.append("gapic/{gapic_version}")
    if self.client_library_version is not None:
        parts.append("gccl/{client_library_version}")
    return " ".join(parts).format(**self.__dict__).strip()
def generate(self, pattern=None):
lst = self._lists[pattern]
while True:
result = lst[self._randrange(lst.length)]
n = len(result)
if (self._ensure_unique and len(set(result)) != n or
self._check_prefix and len(set(x[:self._check_prefix] for x in result)) != n or
self._max_slug_length and sum(len(x) for x in result) + n - 1 > self._max_slug_length):
continue
return result | Generates and returns random name as a list of strings. |
def _group(self, obj, val, behavior):
ns = self.Namespace()
ns.result = {}
iterator = self._lookupIterator(val)
def e(value, index, *args):
key = iterator(value, index)
behavior(ns.result, key, value)
_.each(obj, e)
if len(ns.result) == 1:
try:
return ns.result[0]
except KeyError:
return list(ns.result.values())[0]
return ns.result | An internal function used for aggregate "group by" operations. |
def info(gandi):
output_keys = ['handle', 'credit', 'prepaid']
account = gandi.account.all()
account['prepaid_info'] = gandi.contact.balance().get('prepaid', {})
output_account(gandi, account, output_keys)
return account | Display information about hosting account. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.