| code (string, length 51–2.34k) | docstring (string, length 11–171) |
|---|---|
def ignore_logger(name_or_logger, allow_level=None):
    """Ignore a logger during breadcrumb recording.

    Records are suppressed unless ``allow_level`` is given and the record's
    level is at or above it.
    """
    def handler(logger, level, msg, args, kwargs):
        # Returning True suppresses the record; only records that clear the
        # allow_level threshold pass through.
        if allow_level is None:
            return True
        return level < allow_level
    register_special_log_handler(name_or_logger, handler)
def _rm_gos_edges_rel(self, rm_goids, edges_rel):
edges_ret = {}
for rname, edges_cur in edges_rel.items():
edges_new = self._rm_gos_edges(rm_goids, edges_cur)
if edges_new:
edges_ret[rname] = edges_new
return edges_ret | Remove any relationship that contain user-specified edges. |
def _recursive_cleanup(foo):
if isinstance(foo, dict):
for (key, val) in list(foo.items()):
if isinstance(val, dict):
_recursive_cleanup(val)
if val == "" or val == [] or val == {}:
del foo[key] | Aggressively cleans up things that look empty. |
def drange(v0, v1, d):
    """Return a discrete range: the bins [v0//d, (v1+d)//d).

    ``v0`` must be strictly less than ``v1``.
    """
    assert v0 < v1
    # ``xrange`` is Python 2 only; ``range`` is its lazy Python 3 equivalent.
    return range(int(v0) // d, int(v1 + d) // d)
def _check_valid_basic(self, get_params):
try:
if get_params(self.variable):
return self.default
except:
pass
return not self.default | Simple check that the variable is set |
def cmd_terrain_check(self, args):
    """Check a piece of terrain data at a lat/lon taken from ``args``
    or, failing that, from the last map click."""
    if len(args) >= 2:
        latlon = (float(args[0]), float(args[1]))
    else:
        try:
            latlon = self.module('map').click_position
        except Exception:
            print("No map available")
            return
    if latlon is None:
        print("No map click position available")
        return
    # Scaled to integer degrees * 1e7 — presumably the MAVLink lat/lon
    # encoding expected by terrain_check_send; confirm against the API.
    self.check_lat = int(latlon[0]*1e7)
    self.check_lon = int(latlon[1]*1e7)
    self.master.mav.terrain_check_send(self.check_lat, self.check_lon)
def merge_dict(dict_1, *other, **kw):
    """Merge two or more dicts, plus keyword arguments, into a new dict.

    Later mappings win on key collisions; ``dict_1`` is not modified.
    """
    merged = dict(dict_1)
    for mapping in other:
        merged.update(mapping)
    merged.update(kw)
    return merged
def create_app():
    """Create (or reuse) the global Qt application instance."""
    global QT_APP
    # Reuse an existing QApplication if one was already created elsewhere.
    QT_APP = QApplication.instance()
    if QT_APP is None:
        QT_APP = QApplication(sys.argv)
    return QT_APP
def reverse(self):
    """Return a clone of the QuerySet with its ordering reversed.

    An explicit ordering must already be set.
    """
    clone = self._clone()
    assert self._ordering, "You need to set an ordering for reverse"
    flipped = []
    for order in self._ordering:
        for field, direction in order.items():
            flipped.append({field: "desc" if direction == "asc" else "asc"})
    clone._ordering = flipped
    return clone
def str_append_hash(*args):
    """Lower-case each argument as a string, concatenate, then hash.

    Note: ``hash()`` of a str varies between interpreter runs unless
    PYTHONHASHSEED is fixed, so the result is only stable in-process.
    """
    # str.join avoids the quadratic cost of repeated string concatenation.
    return hash("".join(str(arg).lower() for arg in args))
def divide_url(self, url):
    """Divide a URL into (host, path).

    The scheme prefix, when present, is excluded from both parts.
    """
    # Use startswith: 'https://' merely *appearing* in the URL (e.g. inside
    # a query string) must not be mistaken for the scheme, which the
    # original ``in`` test did.
    for scheme in ('https://', 'http://'):
        if url.startswith(scheme):
            rest = url[len(scheme):]
            host = rest.split('/')[0]
            return host, rest[len(host):]
    host = url.split('/')[0]
    return host, url[len(host):]
def _pnorm_default(x, p):
return np.linalg.norm(x.data.ravel(), ord=p) | Default p-norm implementation. |
def remove(self):
    """Remove the virtual environment completely."""
    # Converted Python 2 print statements to the print() function; the
    # rest of this codebase already uses Python 3-only features.
    print('remove')
    if self.exists():
        print('cleaning', self.venv)
        run('rm -rf {}'.format(self.venv))
def inherit_doc(cls):
    """Class decorator: copy missing docstrings from parent classes.

    Each public attribute lacking a ``__doc__`` receives the first
    documented version found on a base class.
    """
    for attr_name, attr in vars(cls).items():
        if attr_name.startswith("_") or attr.__doc__:
            continue
        for base in cls.__bases__:
            inherited = getattr(base, attr_name, None)
            if inherited and getattr(inherited, "__doc__", None):
                attr.__doc__ = inherited.__doc__
                break
    return cls
def execd_run(command, execd_dir=None, die_on_error=True, stderr=subprocess.STDOUT):
    """Run ``command`` for each module within ``execd_dir`` which defines it.

    On a non-zero exit the error is logged and, when ``die_on_error`` is
    True, the process exits with the child's return code.
    """
    for submodule_path in execd_submodule_paths(command, execd_dir):
        try:
            subprocess.check_output(submodule_path, stderr=stderr,
                                    universal_newlines=True)
        except subprocess.CalledProcessError as e:
            hookenv.log("Error ({}) running {}. Output: {}".format(
                e.returncode, e.cmd, e.output))
            if die_on_error:
                sys.exit(e.returncode)
def create_inquirer_layout(
        ic: InquirerControl,
        get_prompt_tokens: Callable[[], List[Tuple[Text, Text]]],
        **kwargs) -> Layout:
    """Create a layout combining the question prompt and the inquirer
    selection window; the selection window is hidden once the prompt
    is done (``~IsDone()``)."""
    ps = PromptSession(get_prompt_tokens, reserve_space_for_menu=0, **kwargs)
    _fix_unecessary_blank_lines(ps)
    return Layout(HSplit([
        ps.layout.container,
        ConditionalContainer(
            Window(ic),
            filter=~IsDone()
        )
    ]))
def pol2cart(theta, rho):
    """Convert polar coordinates (theta, rho) to Cartesian (x, y)."""
    return rho * np.cos(theta), rho * np.sin(theta)
def next(self):
    """Get the next log line, adjusting for year rollover and caching the
    detected datetime format as a hint for subsequent lines."""
    line = self.filehandle.readline()
    line = line.decode('utf-8', 'replace')
    if line == '':
        # readline() returns '' only at EOF.
        raise StopIteration
    line = line.rstrip('\n')
    le = LogEvent(line)
    if self._datetime_format and self._datetime_nextpos is not None:
        ret = le.set_datetime_hint(self._datetime_format,
                                   self._datetime_nextpos,
                                   self.year_rollover)
        if not ret:
            # Hint failed: drop it and fall back to full detection.
            self._datetime_format = None
            self._datetime_nextpos = None
    elif le.datetime:
        # Cache format/position detected on this line for the next ones.
        self._datetime_format = le.datetime_format
        self._datetime_nextpos = le._datetime_nextpos
    return le
def onClose(self, was_clean, code, reason):
    """Publish an ON_CLOSE message and deregister the client when its
    websocket connection closes."""
    logger.debug("Connection closed ({peer})".format(peer=self.peer))
    self.factory.mease.publisher.publish(
        message_type=ON_CLOSE, client_id=self._client_id, client_storage=self.storage)
    self.factory.remove_client(self)
def generic_html(self, result, errors):
    """Try to display any object as sensible HTML (a one- or two-column table).

    Returns a dict with the rendered 'body' and 'mimetype'.
    """
    h1 = htmlize(type(result))
    out = []
    result = pre_process_json(result)
    # Set the default header up-front: the original left ``header`` unbound
    # on the string-like (``lower``) branch, raising UnboundLocalError at
    # render time.
    header = "<tr><th>Value</th></tr>"
    if not hasattr(result, 'items'):
        if type(result) is list:
            result = htmlize_list(result)
        else:
            result = htmlize(result)
        out = ["<tr><td>" + result + "</td></tr>"]
    elif hasattr(result, 'lower'):
        out = ["<tr><td>" + result + "</td></tr>"]
    else:
        header = "<tr><th>Key</th><th>Value</th></tr>"
        for key, value in result.items():
            v = htmlize(value)
            row = "<tr><td>{0}</td><td>{1}</td></tr>".format(key, v)
            out.append(row)
    env = Environment(loader=PackageLoader('giotto'))
    template = env.get_template('generic.html')
    rendered = template.render({'header': h1, 'table_header': header, 'table_body': out})
    return {'body': rendered, 'mimetype': 'text/html'}
def _add_cpu_percent(self, cur_read):
for executor_id, cur_data in cur_read.items():
stats = cur_data['statistics']
cpus_limit = stats.get('cpus_limit')
cpus_utilisation = stats.get('cpus_utilisation')
if cpus_utilisation and cpus_limit != 0:
stats['cpus_percent'] = cpus_utilisation / cpus_limit | Compute cpu percent basing on the provided utilisation |
def build_archive(cls, **kwargs):
    """Return the singleton `JobArchive` instance, building it if needed.

    ``kwargs`` are only used on the first call, when the instance is created.
    """
    if cls._archive is None:
        cls._archive = cls(**kwargs)
    return cls._archive
def version_from_frame(frame):
    """Given a ``frame``, obtain the version number of the module running there.

    Resolution order: environment override, then the installed distribution
    for the module (falling back to its top-level package). Returns None
    when no distribution is found.
    """
    module = getmodule(frame)
    if module is None:
        s = "<unknown from {0}:{1}>"
        return s.format(frame.f_code.co_filename, frame.f_lineno)
    module_name = module.__name__
    # Environment override, e.g. AUTOVERSION_MYPACKAGE=1.2.3
    variable = "AUTOVERSION_{}".format(module_name.upper())
    override = os.environ.get(variable, None)
    if override is not None:
        return override
    while True:
        try:
            get_distribution(module_name)
        except DistributionNotFound:
            # Fall back from the dotted name to its top-level package.
            module_name, dot, _ = module_name.partition(".")
            if dot == "":
                break
        else:
            return getversion(module_name)
    return None
def strip_spaces(value, sep=None, join=True):
    """Strip surrounding whitespace and collapse separator runs.

    Splits on ``sep`` (any whitespace by default), strips each piece, and
    rejoins with ``sep`` (or a single space). With ``join=False`` the list
    of stripped pieces is returned instead.
    """
    pieces = [piece.strip() for piece in value.strip().split(sep)]
    if not join:
        return pieces
    return (sep or ' ').join(pieces)
def from_dbus_fact(fact):
    """Unpack a D-Bus fact struct (an indexed sequence) into a Fact.

    Timestamps (indices 1, 2, 8) are UTC epoch seconds; a falsy end time
    means the fact is ongoing. fact[4] is presumably the activity name —
    confirm against Fact's first positional parameter.
    """
    return Fact(fact[4],
                start_time = dt.datetime.utcfromtimestamp(fact[1]),
                end_time = dt.datetime.utcfromtimestamp(fact[2]) if fact[2] else None,
                description = fact[3],
                activity_id = fact[5],
                category = fact[6],
                tags = fact[7],
                date = dt.datetime.utcfromtimestamp(fact[8]).date(),
                id = fact[0]
                )
def _linux_memdata():
    """Return the memory information (MiB) for Linux-like systems.

    Parses /proc/meminfo; missing file yields zeros.
    """
    grains = {'mem_total': 0, 'swap_total': 0}
    meminfo = '/proc/meminfo'
    if os.path.isfile(meminfo):
        with salt.utils.files.fopen(meminfo, 'r') as ifile:
            for line in ifile:
                comps = line.rstrip('\n').split(':')
                if not len(comps) > 1:
                    continue
                # /proc/meminfo reports kB; convert to MiB.
                if comps[0].strip() == 'MemTotal':
                    grains['mem_total'] = int(comps[1].split()[0]) // 1024
                if comps[0].strip() == 'SwapTotal':
                    grains['swap_total'] = int(comps[1].split()[0]) // 1024
    return grains
def transform_dot(self, node, results):
    """Transform for calls to module members in code: rename the module
    part of a dotted reference according to MAPPING."""
    module_dot = results.get("bare_with_attr")
    member = results.get("member")
    new_name = None
    if isinstance(member, list):
        member = member[0]
    # Find the first mapping entry whose member list contains this member.
    for change in MAPPING[module_dot.value]:
        if member.value in change[1]:
            new_name = change[0]
            break
    if new_name:
        module_dot.replace(Name(new_name,
                                prefix=module_dot.prefix))
    else:
        self.cannot_convert(node, "This is an invalid module element")
def allow_unregister(self, plugin_override=True):
    """Return True if students can unregister from the course.

    The first plugin hook result wins unless no hook responded or
    ``plugin_override`` is False, in which case the course default is used.
    """
    vals = self._hook_manager.call_hook('course_allow_unregister', course=self, default=self._allow_unregister)
    return vals[0] if len(vals) and plugin_override else self._allow_unregister
def make_json_formatter(graph):
    """Create the default json formatter config from the graph's
    logging settings ('()' is the logging-config factory key)."""
    return {
        "()": graph.config.logging.json_formatter.formatter,
        "fmt": graph.config.logging.json_required_keys,
    }
def whitelist(ctx, whitelist_account, account):
    """Add an account to a whitelist and print the resulting transaction."""
    account = Account(account, blockchain_instance=ctx.blockchain)
    print_tx(account.whitelist(whitelist_account))
def hyperparameters(self):
    """Return hyperparameters used by your custom Chainer code during training.

    Extends the base hyperparameters with the MPI-related settings,
    dropping any that are unset (falsy).
    """
    hyperparameters = super(Chainer, self).hyperparameters()
    additional_hyperparameters = {Chainer._use_mpi: self.use_mpi,
                                  Chainer._num_processes: self.num_processes,
                                  Chainer._process_slots_per_host: self.process_slots_per_host,
                                  Chainer._additional_mpi_options: self.additional_mpi_options}
    # Only keep options that were actually provided.
    additional_hyperparameters = {k: v for k, v in additional_hyperparameters.items() if v}
    hyperparameters.update(Framework._json_encode_hyperparameters(additional_hyperparameters))
    return hyperparameters
def teardown(self):
    """Clean up the target once all tests are completed: tear down the
    controller (if any) and every monitor."""
    if self.controller:
        self.controller.teardown()
    for monitor in self.monitors:
        monitor.teardown()
def _is_tp(pkt):
    """Return True if ``pkt`` is using SOMEIP-TP, else False.

    Accepts either a parsed Packet (checked via msg_type) or raw bytes
    (message type at offset 15).
    """
    tp = [SOMEIP.TYPE_TP_REQUEST, SOMEIP.TYPE_TP_REQUEST_NO_RET,
          SOMEIP.TYPE_TP_NOTIFICATION, SOMEIP.TYPE_TP_RESPONSE,
          SOMEIP.TYPE_TP_ERROR]
    if isinstance(pkt, Packet):
        return pkt.msg_type in tp
    else:
        return pkt[15] in tp
def _prepare_uimodules(self):
    """Resolve configured UI Module paths to classes, in place."""
    # dict.iteritems() is Python 2 only; items() works on both and the
    # codebase already relies on Python 3 features elsewhere.
    for key, value in self._config.get(config.UI_MODULES, {}).items():
        self._config[config.UI_MODULES][key] = self._import_class(value)
    self._config[config.UI_MODULES] = dict(self._config[config.UI_MODULES] or {})
def _get_firewall_rules(firewall_rules):
    """Construct a list of optional FirewallRule objects from the cloud profile.

    Raises SaltCloudConfigError when a rule lacks the mandatory 'protocol'.
    """
    ret = []
    for key, value in six.iteritems(firewall_rules):
        # Test membership on the dict directly; materialising .keys() was
        # redundant, and ``value`` already is firewall_rules[key].
        if 'protocol' not in value:
            raise SaltCloudConfigError(
                'The firewall rule \'{0}\' is missing \'protocol\''.format(key)
            )
        ret.append(FirewallRule(
            name=key,
            protocol=value.get('protocol', None),
            source_mac=value.get('source_mac', None),
            source_ip=value.get('source_ip', None),
            target_ip=value.get('target_ip', None),
            port_range_start=value.get('port_range_start', None),
            port_range_end=value.get('port_range_end', None),
            icmp_type=value.get('icmp_type', None),
            icmp_code=value.get('icmp_code', None)
        ))
    return ret
def _get_data_from_empty_list(source, fields='*', first_row=0, count=-1, schema=None):
    """Helper for _get_data that handles empty lists: returns the column
    definitions with zero rows, and a total count of 0."""
    fields = get_field_list(fields, schema)
    return {'cols': _get_cols(fields, schema), 'rows': []}, 0
def log_error(self, callback, error=None):
    """Print the error that occurred while running the given callback."""
    print("Uncaught error during callback: {}".format(callback))
    print("Error: {}".format(error))
def state(anon, obj, field, val):
    """Return a randomly selected US state code (via the faker backend)."""
    return anon.faker.state(field=field)
async def save(self, request, response):
    """Save the request's session to the response cookies.

    Skipped unless the response is a Response object, a session exists on
    the request, and the response has not been prepared yet.
    """
    if isinstance(response, Response) and SESSION_KEY in request and not response.prepared:
        session = request[SESSION_KEY]
        if session.save(response.set_cookie):
            self.app.logger.debug('Session saved: %s', session)
def changelist_view(self, request, extra_context=None):
    """Update the changelist view context with the tracking-report URL
    name and the period/report options from this admin."""
    return super(TrackedLiveAdmin, self).changelist_view(
        request, dict(extra_context or {},
                      url_name='admin:%s_%s_tracking_report' % (self.model._meta.app_label, self.model._meta.model_name),
                      period_options=self.get_period_options(),
                      report_options=self.get_report_options())
    )
def _symmetrize_correlograms(correlograms):
    """Return the symmetrized version of the CCG arrays.

    Input shape is (n_clusters, n_clusters, n_bins); the result has
    2*n_bins - 1 bins, with negative lags mirrored from positive ones.
    """
    n_clusters, _, n_bins = correlograms.shape
    assert n_clusters == _
    # The zero-lag bin must agree between (i, j) and (j, i).
    correlograms[..., 0] = np.maximum(correlograms[..., 0],
                                      correlograms[..., 0].T)
    # Mirror the positive lags and swap cluster axes for the negative lags.
    sym = correlograms[..., 1:][..., ::-1]
    sym = np.transpose(sym, (1, 0, 2))
    return np.dstack((sym, correlograms))
def track_download_request(download_url, download_title):
    """Track a download in Piwik.

    Both the URL and the title are mandatory; raises ValueError otherwise.
    """
    from indico_piwik.plugin import PiwikPlugin
    if not download_url:
        raise ValueError("download_url can't be empty")
    if not download_title:
        raise ValueError("download_title can't be empty")
    request = PiwikRequest(server_url=PiwikPlugin.settings.get('server_api_url'),
                           site_id=PiwikPlugin.settings.get('site_id_events'),
                           api_token=PiwikPlugin.settings.get('server_token'),
                           query_script=PiwikPlugin.track_script)
    action_url = quote(download_url)
    dt = datetime.now()
    request.call(idsite=request.site_id,
                 rec=1,
                 action_name=quote(download_title.encode('utf-8')),
                 url=action_url,
                 download=action_url,
                 h=dt.hour, m=dt.minute, s=dt.second)
def scenario(ctx, dependency_name, driver_name, lint_name, provisioner_name,
             role_name, scenario_name, verifier_name):
    """Initialize a new scenario for use with Molecule."""
    command_args = {
        'dependency_name': dependency_name,
        'driver_name': driver_name,
        'lint_name': lint_name,
        'provisioner_name': provisioner_name,
        'role_name': role_name,
        'scenario_name': scenario_name,
        'subcommand': __name__,
        'verifier_name': verifier_name,
    }
    # Each verifier has a matching lint tool.
    verifier_lint = {
        'inspec': 'rubocop',
        'goss': 'yamllint',
        'ansible': 'ansible-lint',
    }
    if verifier_name in verifier_lint:
        command_args['verifier_lint_name'] = verifier_lint[verifier_name]
    Scenario(command_args).execute()
def cache_items(values):
    """Cache indicators that were successfully sent, to avoid duplicates.

    Hashes are appended to ~/.config/blockade/cache.txt; values whose hash
    is already recorded are skipped. Always returns True.
    """
    import os
    config_path = os.path.expanduser('~/.config/blockade')
    file_path = os.path.join(config_path, 'cache.txt')
    if not os.path.isfile(file_path):
        # ``file()`` was Python 2 only; ``open`` works everywhere.
        open(file_path, 'w').close()
    with open(file_path, 'r') as cached:
        written = [line.strip() for line in cached]
    # Context manager guarantees the handle is flushed and closed.
    with open(file_path, 'a') as handle:
        for item in values:
            hashed = item if is_hashed(item) else hash_values(item)
            if hashed in written:
                continue
            handle.write(hashed + "\n")
    return True
def check_exclamations_ppm(text):
    """Flag text whose exclamation density exceeds 30 parts per million words.

    Returns a single-item error list pointing at the first exclamation,
    or an empty list when the text is calm enough.
    """
    err = "leonard.exclamation.30ppm"
    msg = u"More than 30 ppm of exclamations. Keep them under control."
    regex = r"\w!"
    count = len(re.findall(regex, text))
    num_words = len(text.split(" "))
    ppm = (count * 1.0 / num_words) * 1e6
    if ppm <= 30 or count <= 1:
        return []
    loc = re.search(regex, text).start() + 1
    return [(loc, loc + 1, err, msg, ".")]
def addEdgeToGraph(parentNodeName, childNodeName, graphFileHandle, colour="black", length="10", weight="1", dir="none", label="", style=""):
    """Link two nodes together by writing a styled edge to the graph file."""
    attrs = 'edge[color=%s,len=%s,weight=%s,dir=%s,label="%s",style=%s];\n' % (
        colour, length, weight, dir, label, style)
    graphFileHandle.write(attrs)
    graphFileHandle.write("%s -- %s;\n" % (parentNodeName, childNodeName))
def register_job_from_link(self, link, key, **kwargs):
    """Register a job in the `JobArchive` from a `Link` object.

    Optional kwargs: job_config (defaults to link.args), status
    (defaults to JobStatus.unknown), logfile. Returns the JobDetails.
    """
    job_config = kwargs.get('job_config', None)
    if job_config is None:
        job_config = link.args
    status = kwargs.get('status', JobStatus.unknown)
    job_details = JobDetails(jobname=link.linkname,
                             jobkey=key,
                             appname=link.appname,
                             logfile=kwargs.get('logfile'),
                             jobconfig=job_config,
                             timestamp=get_timestamp(),
                             file_dict=copy.deepcopy(link.files),
                             sub_file_dict=copy.deepcopy(link.sub_files),
                             status=status)
    self.register_job(job_details)
    return job_details
def cwms_process_text():
    """Process text from the request body with CWMS and return INDRA
    Statements (empty dict for CORS preflight OPTIONS requests)."""
    if request.method == 'OPTIONS':
        return {}
    response = request.body.read().decode('utf-8')
    body = json.loads(response)
    text = body.get('text')
    cp = cwms.process_text(text)
    return _stmts_from_proc(cp)
def dateJDN(year, month, day, calendar):
    """Convert a calendar date to a Julian Day Number.

    Uses the standard JDN conversion with the March-based year shift;
    the Gregorian formula adds the century leap-year corrections that the
    Julian formula omits.
    """
    a = (14 - month) // 12
    y = year + 4800 - a
    m = month + 12*a - 3
    if calendar == GREGORIAN:
        return day + (153*m + 2)//5 + 365*y + y//4 - y//100 + y//400 - 32045
    else:
        return day + (153*m + 2)//5 + 365*y + y//4 - 32083
def _get_source_sum(source_hash, file_path, saltenv):
    """Extract the hash sum, whether it is in a remote hash file or a
    '<hash type>=<hash>' string.

    Returns {'hash_type': ..., 'hsum': ...}; raises on malformed input or
    an unreachable remote hash file.
    """
    ret = dict()
    schemes = ('salt', 'http', 'https', 'ftp', 'swift', 's3', 'file')
    invalid_hash_msg = ("Source hash '{0}' format is invalid. It must be in "
                        "the format <hash type>=<hash>").format(source_hash)
    source_hash = six.text_type(source_hash)
    source_hash_scheme = _urlparse(source_hash).scheme
    if source_hash_scheme in schemes:
        # It's a URL: fetch the hash file and extract the sum from it.
        cached_hash_file = __salt__['cp.cache_file'](source_hash, saltenv)
        if not cached_hash_file:
            raise CommandExecutionError(('Source hash file {0} not'
                                         ' found').format(source_hash))
        ret = __salt__['file.extract_hash'](cached_hash_file, '', file_path)
        if ret is None:
            raise SaltInvocationError(invalid_hash_msg)
    else:
        # It's an inline '<hash type>=<hash>' string.
        items = source_hash.split('=', 1)
        if len(items) != 2:
            invalid_hash_msg = ('{0}, or it must be a supported protocol'
                                ': {1}').format(invalid_hash_msg,
                                                ', '.join(schemes))
            raise SaltInvocationError(invalid_hash_msg)
        ret['hash_type'], ret['hsum'] = [item.strip().lower() for item in items]
    return ret
def read_input_data(filename):
    """Read training data: one 'label,text' line per record.

    Returns (labels, data) as two parallel lists of stripped strings.
    """
    logging.info('Opening file %s for reading input', filename)
    labels = []
    data = []
    # Context manager ensures the file is closed (the original leaked the
    # handle).
    with open(filename, 'r') as input_file:
        for line in input_file:
            # Split only on the first comma: the text may contain commas.
            tokens = line.split(',', 1)
            labels.append(tokens[0].strip())
            data.append(tokens[1].strip())
    return labels, data
def task_class(self):
    """Return the Task class type configured for the scenario's root task."""
    from scenario_player.tasks.base import get_task_class_for_type
    root_task_type, _ = self.task
    task_class = get_task_class_for_type(root_task_type)
    return task_class
def __generate_key(self, config):
    """Generate the ssh key (if configured) and record the key paths in
    ``config``."""
    cwd = config.get('ssh_path', self._install_directory())
    if config.is_affirmative('create', default="yes"):
        if not os.path.exists(cwd):
            os.makedirs(cwd)
        if not os.path.exists(os.path.join(cwd, config.get('keyname'))):
            # -N at the end leaves the passphrase empty.
            command = "ssh-keygen -t %(type)s -f %(keyname)s -N " % config.to_dict()
            lib.call(command, cwd=cwd, output_log_level=logging.DEBUG)
    if not config.has('ssh_path'):
        config.set('ssh_path', cwd)
    config.set('ssh_key_path', os.path.join(config.get('ssh_path'), config.get('keyname')))
def remove_accounts_from_project(accounts_query, project):
    """Remove all not-yet-deleted accounts in the query from the project."""
    query = accounts_query.filter(date_deleted__isnull=True)
    for account in query:
        remove_account_from_project(account, project)
def _checkblk(name):
    """Check if the block device exists; return its fstype or '' if not."""
    blk = __salt__['cmd.run']('blkid -o value -s TYPE {0}'.format(name),
                              ignore_retcode=True)
    return '' if not blk else blk
def build_lines_data(self, code_obj):
    """Generate various line-related helper data.

    Builds self.linestarts (offset -> line number) and returns a list of
    LineTuple(l_no, next) entries, one per bytecode offset, giving the
    line each offset belongs to and where the next line starts.
    """
    if self.version > 1.4:
        linestarts = list(self.opc.findlinestarts(code_obj))
    else:
        # Very old bytecode has no line table; pretend it is all line 1.
        linestarts = [[0, 1]]
    self.linestarts = dict(linestarts)
    lines = []
    LineTuple = namedtuple('LineTuple', ['l_no', 'next'])
    _, prev_line_no = linestarts[0]
    offset = 0
    for start_offset, line_no in linestarts[1:]:
        while offset < start_offset:
            lines.append(LineTuple(prev_line_no, start_offset))
            offset += 1
        prev_line_no = line_no
    # Fill the tail: offsets after the last recorded line start.
    codelen = len(self.code)
    while offset < codelen:
        lines.append(LineTuple(prev_line_no, codelen))
        offset += 1
    return lines
def request_reset(self, event):
    """Handle an anonymous client's password reset request.

    Looks up the user by mail address and fails the event when unknown.
    """
    self.log('Password reset request received:', event.__dict__, lvl=hilight)
    user_object = objectmodels['user']
    email = event.data.get('email', None)
    email_user = None
    if email is not None and user_object.count({'mail': email}) > 0:
        email_user = user_object.find_one({'mail': email})
    if email_user is None:
        self._fail(event, msg="Mail address unknown")
        return
def clear(self):
    """Clear the contents of the data structure (reset to empty arrays)."""
    self.title = None
    self.numbers = np.zeros(0, int)
    self.atom_types = []
    self.charges = []
    self.names = []
    self.molecules = np.zeros(0, int)
    # Connectivity arrays keep their column widths: pairs, triples, quads.
    self.bonds = np.zeros((0, 2), int)
    self.bends = np.zeros((0, 3), int)
    self.dihedrals = np.zeros((0, 4), int)
    self.impropers = np.zeros((0, 4), int)
    self.name_cache = {}
def getInput():
    """Read the input buffer without blocking the system.

    Windows uses msvcrt keyboard polling; elsewhere select() polls stdin.
    NOTE(review): on py3 msvcrt.getch() returns bytes, so the ``+=`` into a
    str would fail on Windows — confirm the intended Python version.
    """
    input = ''
    if sys.platform == 'win32':
        import msvcrt
        if msvcrt.kbhit():
            input += msvcrt.getch()
            print_(input)
        else:
            time.sleep(.1)
    else:
        sock = sys.stdin.fileno()
        while len(select.select([sock], [], [], 0.1)[0]) > 0:
            input += decode(os.read(sock, 4096))
    return input
def concat_generator(filename, up_threshold, low_threshold=10):
    """Generate concatenated lines from the file, up to ``up_threshold``
    characters per emitted chunk (chunks shorter than ``low_threshold``
    are discarded)."""
    txt = ""
    for line in tf.gfile.Open(filename):
        line = line.strip()
        # Flush the accumulator before it would exceed the upper bound.
        if len(txt) + len(line) + 1 >= up_threshold:
            ret = txt
            txt = ""
            if len(ret) > low_threshold and len(ret) < up_threshold:
                yield {"targets": ret}
        if not txt:
            txt = line
        else:
            txt = " ".join([txt, line])
def grad(self):
    """Return the gradient buffer attached to this NDArray, or None."""
    from . import _ndarray_cls
    hdl = NDArrayHandle()
    check_call(_LIB.MXNDArrayGetGrad(self.handle, ctypes.byref(hdl)))
    if hdl.value is None:
        return None
    return _ndarray_cls(hdl)
def find_module(self, fullname, path=None):
    """Search for a Coconut file of the given name and compile it.

    Always returns None so the normal import machinery then loads the
    compiled result.
    """
    basepaths = [""] + list(sys.path)
    if fullname.startswith("."):
        # Relative import: only resolvable when a package path is given.
        if path is None:
            return None
        fullname = fullname[1:]
        basepaths.insert(0, path)
    fullpath = os.path.join(*fullname.split("."))
    for head in basepaths:
        path = os.path.join(head, fullpath)
        filepath = path + self.ext
        dirpath = os.path.join(path, "__init__" + self.ext)
        if os.path.exists(filepath):
            self.run_compiler(filepath)
            return None
        if os.path.exists(dirpath):
            # A package: compile the whole directory.
            self.run_compiler(path)
            return None
    return None
def read_property(f, endianness="<"):
    """Read one property (name, typed value) from a segment's metadata."""
    prop_name = types.String.read(f, endianness)
    # The type id on the wire selects the reader for the value.
    prop_data_type = types.tds_data_types[types.Uint32.read(f, endianness)]
    value = prop_data_type.read(f, endianness)
    log.debug("Property %s: %r", prop_name, value)
    return prop_name, value
def append_payload(self, payload: Payload) -> Payload:
    """Add a new body part to the multipart writer.

    Validates content-encoding and transfer-encoding headers, and sets
    Content-Length when the size is known and no encoding applies.
    Raises RuntimeError for unsupported encodings.
    """
    encoding = payload.headers.get(CONTENT_ENCODING, '').lower()
    if encoding and encoding not in ('deflate', 'gzip', 'identity'):
        raise RuntimeError('unknown content encoding: {}'.format(encoding))
    if encoding == 'identity':
        # 'identity' means no transformation; normalise to None.
        encoding = None
    te_encoding = payload.headers.get(
        CONTENT_TRANSFER_ENCODING, '').lower()
    if te_encoding not in ('', 'base64', 'quoted-printable', 'binary'):
        raise RuntimeError('unknown content transfer encoding: {}'
                           ''.format(te_encoding))
    if te_encoding == 'binary':
        te_encoding = None
    size = payload.size
    if size is not None and not (encoding or te_encoding):
        payload.headers[CONTENT_LENGTH] = str(size)
    self._parts.append((payload, encoding, te_encoding))
    return payload
def drop_streams(streams, kdims, keys):
    """Drop any dimensioned streams from the keys and kdims.

    Returns the remaining kdims and the keys restricted to those dims.
    """
    stream_params = stream_parameters(streams)
    inds, dims = zip(*[(ind, kdim) for ind, kdim in enumerate(kdims)
                       if kdim not in stream_params])
    get = operator.itemgetter(*inds)
    keys = (get(k) for k in keys)
    # itemgetter with a single index returns a scalar, so re-wrap it.
    return dims, ([wrap_tuple(k) for k in keys] if len(inds) == 1 else list(keys))
def _move_centroids(self):
    """Recalculate centroids as the means of the samples in each cluster,
    dropping clusters that received no samples.

    NOTE(review): ``k -= 1`` at the end has no effect — the for loop
    rebinds ``k`` on the next iteration. Also, ``self.n_clusters`` is
    mutated while iterating over the original range, and
    ``self.clusters -= 1`` shifts *all* labels, not just those above the
    removed cluster — this looks correct only when the empty cluster is
    the last one; confirm against the caller.
    """
    for k in range(self.n_clusters):
        if k in self.clusters:
            centroid = np.mean(self._X[self.clusters == k, :], axis=0)
            self.centroids[k] = centroid
        else:
            self.n_clusters -= 1
            self.centroids = self.centroids[:self.n_clusters]
            self.clusters -= 1
            k -= 1
def suppressConsoleOut(meth):
    """Decorator: silence stdout while the wrapped method runs."""
    @wraps(meth)
    def decorate(*args, **kwargs):
        _stdout = sys.stdout
        # Context manager closes the devnull handle (the original leaked it),
        # and the pointless ``except Exception: raise e`` is dropped —
        # ``finally`` alone guarantees stdout is restored.
        with open(os.devnull, 'w') as devnull:
            sys.stdout = devnull
            try:
                return meth(*args, **kwargs)
            finally:
                sys.stdout = _stdout
    return decorate
def _lookup_style(element, names):
    """Lookup style by element name (prefixed with '_') plus the given
    list of class names, concatenated in order."""
    return _STYLES.get('_'+element, '') + \
        ''.join([_STYLES.get(name, '') for name in names])
async def release_name_async(self, bus_name, error = None, timeout = DBUS.TIMEOUT_USE_DEFAULT) :
    "Releases a registered bus name; must be awaited on the attached event loop."
    assert self.loop != None, "no event loop to attach coroutine to"
    return \
        await self.connection.bus_release_name_async(bus_name, error = error, timeout = timeout)
def open(self):
    """Open a connection and channel to the AMQP compliant broker."""
    self._connection = \
        amqp.Connection(host='%s:%s' % (self.hostname, self.port),
                        userid=self.username, password=self.password,
                        virtual_host=self.virtual_host, insist=False)
    self.channel = self._connection.channel()
def uninstall(self, bug: Bug) -> bool:
    """Uninstall the Docker image associated with a given bug.

    NOTE(review): currently a stub — the POST is issued but its result is
    ignored and NotImplementedError is always raised.
    """
    r = self.__api.post('bugs/{}/uninstall'.format(bug.name))
    raise NotImplementedError
def create_response(version, status, headers):
    """Create an HTTP response header block as bytes.

    ``headers`` is an iterable of (name, value) pairs; the block ends with
    the blank line separating headers from the body.
    """
    message = []
    message.append('HTTP/{} {}\r\n'.format(version, status))
    for name, value in headers:
        message.append(name)
        message.append(': ')
        message.append(value)
        message.append('\r\n')
    message.append('\r\n')
    return s2b(''.join(message))
def _merge_nbval_coverage_data(cov):
    """Merge nbval coverage data into pytest-cov data.

    Silently returns when there is no coverage object, no suffix, or the
    nbval data file cannot be read; the data file is removed after merging.
    """
    if not cov:
        return
    suffix = _make_suffix(cov)
    if suffix is True:
        return
    filename = cov.data_files.filename + '.' + suffix
    nbval_data = coverage.CoverageData(debug=cov.debug)
    try:
        nbval_data.read_file(os.path.abspath(filename))
    except coverage.CoverageException:
        return
    # Apply any configured path aliases so merged paths line up.
    aliases = None
    if cov.config.paths:
        aliases = coverage.files.PathAliases()
        for paths in cov.config.paths.values():
            result = paths[0]
            for pattern in paths[1:]:
                aliases.add(pattern, result)
    cov.data.update(nbval_data, aliases=aliases)
    coverage.misc.file_be_gone(filename)
def extern_store_tuple(self, context_handle, vals_ptr, vals_len):
    """Given storage and an array of Handles, return a new Handle
    representing the tuple of their values."""
    c = self._ffi.from_handle(context_handle)
    return c.to_value(tuple(c.from_value(val[0]) for val in self._ffi.unpack(vals_ptr, vals_len)))
def visit(self, node):
    """Walk all children of ``node`` recursively, depth-first,
    yielding each one."""
    for child in node:
        yield child
        yield from self.visit(child)
def league_scores(self, total_data, time):
    """Store fixture scores, grouped by supported league and time,
    via the configured output generator."""
    data = []
    for league, score in self.supported_leagues(total_data):
        item = {'league': league, 'homeTeamName': score['homeTeamName'],
                'goalsHomeTeam': score['result']['goalsHomeTeam'],
                'goalsAwayTeam': score['result']['goalsAwayTeam'],
                'awayTeamName': score['awayTeamName']}
        data.append(item)
    self.generate_output({'league_scores': data, 'time': time})
def raw_tag(name, value):
    """Create a DMAP tag: UTF-8 name, 4-byte big-endian length, raw payload."""
    header = name.encode('utf-8')
    length = len(value).to_bytes(4, byteorder='big')
    return header + length + value
def _generate_grid(self):
grid_axes = []
for _, param in self.tunables:
grid_axes.append(param.get_grid_axis(self.grid_width))
return grid_axes | Get the all possible values for each of the tunables. |
def from_stream(cls, f, **kwargs):
    """Create an editor instance from a file stream, recording its
    origin (and path, when available) in the metadata."""
    lines = lines_from_stream(f)
    if 'meta' not in kwargs:
        kwargs['meta'] = {'from': 'stream'}
    kwargs['meta']['filepath'] = f.name if hasattr(f, 'name') else None
    return cls(lines, **kwargs)
def _where(self, filter_fn):
    """Filter this VList: keep items for which filter_fn(item()) is truthy.

    Note each item is *called* before being passed to the filter.
    """
    assert callable(filter_fn), 'filter_fn needs to be callable'
    return VList(i for i in self if filter_fn(i()))
def next(self):
    """Get the next row in the page, fetching a new block when needed."""
    self._parse_block()
    if self._remaining > 0:
        self._remaining -= 1
    return six.next(self._iter_rows)
def enqueue(self, item_type, item):
    """Queue a new data item under its type, guarded by the queue lock."""
    with self.enlock:
        self.queue[item_type].append(item)
def UndoTransaction(self):
    """Cancel any running transaction by discarding the pending config map."""
    from Ucs import ConfigMap
    self._transactionInProgress = False
    self._configMap = ConfigMap()
def project_drawn(cb, msg):
    """Project a drawn element to the declared coordinate system.

    Returns None when the element is not projectable or already uses the
    plot's projection.
    """
    stream = cb.streams[0]
    old_data = stream.data
    # Briefly swap in the new data to resolve the element, then restore.
    stream.update(data=msg['data'])
    element = stream.element
    stream.update(data=old_data)
    proj = cb.plot.projection
    if not isinstance(element, _Element) or element.crs == proj:
        return None
    # The drawn data is in the plot projection; project back into the crs.
    crs = element.crs
    element.crs = proj
    return project(element, projection=crs)
def load_handler(self):
    """Import the configured handler's module and instantiate the class
    named by the last dotted component of ``self.handler_name``."""
    handler_path = self.handler_name.split(".")
    handler_module = __import__(".".join(handler_path[:-1]), {}, {}, str(handler_path[-1]))
    self.handler = getattr(handler_module, handler_path[-1])()
def recv_rpc(self, context, payload):
    """Enqueue an RPC payload on the control buffer and wake any waiters.

    Safe to call from any thread.
    """
    logger.debug("Adding RPC payload to ControlBuffer queue: %s", payload)
    self.buf.put(('rpc', (context, payload)))
    with self.cv:
        self.cv.notifyAll()
def _maybe_connect(self, node_id):
    """Idempotent non-blocking connection attempt to the given node id.

    Returns True when connected; False when a stale connection was just
    recycled (caller should retry).
    """
    with self._lock:
        conn = self._conns.get(node_id)
        if conn is None:
            broker = self.cluster.broker_metadata(node_id)
            assert broker, 'Broker id %s not in current metadata' % (node_id,)
            log.debug("Initiating connection to node %s at %s:%s",
                      node_id, broker.host, broker.port)
            host, port, afi = get_ip_port_afi(broker.host)
            # WeakMethod avoids a reference cycle between client and conn.
            cb = WeakMethod(self._conn_state_change)
            conn = BrokerConnection(host, broker.port, afi,
                                    state_change_callback=cb,
                                    node_id=node_id,
                                    **self.config)
            self._conns[node_id] = conn
        elif self._should_recycle_connection(conn):
            # Drop the stale connection; a fresh one is made on retry.
            self._conns.pop(node_id)
            return False
        elif conn.connected():
            return True
        conn.connect()
        return conn.connected()
def uniquenessRatio(self, value):
    """Set private ``_uniqueness`` and rebuild ``_block_matcher``.

    Raises InvalidUniquenessRatioError outside the inclusive 5-15 range.
    """
    if value >= 5 and value <= 15:
        self._uniqueness = value
    else:
        raise InvalidUniquenessRatioError("Uniqueness ratio must be "
                                          "between 5 and 15.")
    self._replace_bm()
def _run_workflow(items, paired, workflow_file, work_dir):
    """Run manta SV analysis inside a prepared workflow directory,
    clearing the workspace before and after the run."""
    utils.remove_safe(os.path.join(work_dir, "workspace"))
    data = paired.tumor_data if paired else items[0]
    cmd = [utils.get_program_python("configManta.py"), workflow_file, "-m", "local", "-j", dd.get_num_cores(data)]
    do.run(cmd, "Run manta SV analysis")
    utils.remove_safe(os.path.join(work_dir, "workspace"))
def _get_cibpath():
    """Get the path to the directory on the minion where CIBs are saved."""
    cibpath = os.path.join(__opts__['cachedir'], 'pcs', __env__)
    log.trace('cibpath: %s', cibpath)
    return cibpath
def _format_object(obj, format_type=None):
    """Depending on settings, call either `format_keys` (legacy setting
    present) or `format_field_names`."""
    if json_api_settings.FORMAT_KEYS is not None:
        return format_keys(obj, format_type)
    return format_field_names(obj, format_type)
def to_bigquery_field(self, name_case=DdlParseBase.NAME_CASE.original):
    """Generate BigQuery JSON field define.

    :param name_case: name-case conversion applied to the column name.
    :return: JSON string describing the column as a BigQuery schema field.
    """
    col_name = self.get_name(name_case)
    mode = self.bigquery_mode
    if self.array_dimensional <= 1:
        # Scalar or one-dimensional array: a plain legacy-typed field.
        col_type = self.bigquery_legacy_data_type
    else:
        # Multi-dimensional array: model each extra dimension as a nested
        # "dimension_<i>" RECORD; only the innermost level carries the
        # real data type and mode.
        col_type = "RECORD"
        fields = OrderedDict()
        fields_cur = fields
        for i in range(1, self.array_dimensional):
            is_last = i == self.array_dimensional - 1
            fields_cur['fields'] = [OrderedDict()]
            fields_cur = fields_cur['fields'][0]
            fields_cur['name'] = "dimension_{}".format(i)
            fields_cur['type'] = self.bigquery_legacy_data_type if is_last else "RECORD"
            fields_cur['mode'] = self.bigquery_mode if is_last else "REPEATED"
    col = OrderedDict()
    col['name'] = col_name
    col['type'] = col_type
    col['mode'] = mode
    if self.array_dimensional > 1:
        col['fields'] = fields['fields']
    return json.dumps(col)
def _distarray_missing(self, xc, xd, cdiffs):
    """Distance array calculation for data with missing values."""
    # Pre-compute, per row, the indices of NaN entries in the continuous
    # (xc) and discrete (xd) feature matrices.
    nan_cont = [np.where(np.isnan(xc[row]))[0] for row in range(self._datalen)]
    nan_disc = [np.where(np.isnan(xd[row]))[0] for row in range(self._datalen)]
    if self.n_jobs != 1:
        dist_array = Parallel(n_jobs=self.n_jobs)(
            delayed(get_row_missing)(xc, xd, cdiffs, row, nan_cont, nan_disc)
            for row in range(self._datalen))
    else:
        dist_array = [get_row_missing(xc, xd, cdiffs, row, nan_cont, nan_disc)
                      for row in range(self._datalen)]
    return np.array(dist_array)
def to_ufo_background_image(self, ufo_glyph, layer):
    """Copy the background image from the GSLayer to the UFO Glyph."""
    bg = layer.backgroundImage
    if bg is None:
        return
    img = ufo_glyph.image
    img.fileName = bg.path
    img.transformation = bg.transform
    # Glyphs-specific attributes are stashed in the glyph lib.
    ufo_glyph.lib[CROP_KEY] = list(bg.crop)
    ufo_glyph.lib[LOCKED_KEY] = bg.locked
    ufo_glyph.lib[ALPHA_KEY] = bg.alpha
def list(gandi, domain, limit):
    """List mailboxes created on a domain."""
    mailboxes = gandi.mail.list(domain, {'items_per_page': limit})
    logins = [mailbox['login'] for mailbox in mailboxes]
    output_list(gandi, logins)
    return mailboxes
def make_html_page(self, valumap):
    """Builds the report as html page, using the template page from file.

    :param valumap: mapping of template placeholders to values; the
        'subreports' key is filled in here from ``self.subreports``.
    :return: a :class:`TextPart` wrapping the rendered html page.
    """
    logger.info('Making an html report using template %r.', self.html_template)
    # Context manager guarantees the template file is closed even if
    # read() raises (the original leaked the handle in that case).
    with open(self.html_template) as fh:
        template = fh.read()
    parts = []
    for sr in self.subreports:
        report_data = [item.html for item in sr.report_data if item.html]
        if report_data:
            # NOTE(review): only {1} (sr.reptext) is interpolated; sr.title
            # is passed but unused by this format string -- confirm intended.
            parts.append('\n<h2>{1}</h2>\n'.format(sr.title, sr.reptext))
            parts.extend(report_data)
            parts.append('\n<hr/>')
    valumap['subreports'] = '\n'.join(parts)
    # safe_substitute leaves unmatched placeholders untouched instead of
    # raising KeyError.
    html_page = Template(template).safe_substitute(valumap)
    return TextPart(fmt='html', text=html_page, ext='html')
def CheckRequestsForCompletion(self, requests):
    """Checks if there is a status message queued for a number of requests.

    Returns the subset of `requests` whose request_id has a FLOW_STATUS
    entry stored under the request's "state" subject.
    """
    subjects = [r.session_id.Add("state") for r in requests]
    statuses_found = {}
    # One batched data-store scan over all subjects' status predicates.
    for subject, result in self.MultiResolvePrefix(subjects,
                                                   self.FLOW_STATUS_PREFIX):
      for predicate, _, _ in result:
        # Predicate names end with the request number in hex.
        request_nr = int(predicate.split(":")[-1], 16)
        statuses_found.setdefault(subject, set()).add(request_nr)
    status_available = set()
    for r in requests:
      if r.request_id in statuses_found.get(r.session_id.Add("state"), set()):
        status_available.add(r)
    return status_available | Checks if there is a status message queued for a number of requests.
def app(environ, start_response):
    """Function called by the WSGI server: dispatch through the Router."""
    return HttpRequestHandler(environ, start_response, Router).dispatch()
def __notify(self):
    """Notify the given callback about the result of the execution."""
    if self.__callback is None:
        return
    try:
        self.__callback(
            self._done_event.data,
            self._done_event.exception,
            self.__extra,
        )
    except Exception as ex:
        # A faulty callback must not propagate into the notifier.
        self._logger.exception("Error calling back method: %s", ex)
def insert_tracking_record(self):
    """SQL statement that inserts tracking records.

    Built lazily on first access, then cached on the instance.
    """
    if self._insert_tracking_record is None:
        statement = self._prepare_insert(
            tmpl=self._insert_values_tmpl,
            placeholder_for_id=True,
            record_class=self.tracking_record_class,
            field_names=self.tracking_record_field_names,
        )
        self._insert_tracking_record = statement
    return self._insert_tracking_record
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.