| code | docstring |
|---|---|
def cli(env):
table = formatting.Table(['Code', 'Reason'])
table.align['Code'] = 'r'
table.align['Reason'] = 'l'
mgr = SoftLayer.HardwareManager(env.client)
for code, reason in mgr.get_cancellation_reasons().items():
table.add_row([code, reason])
env.fout(table) | Display a list of cancellation reasons. |
def message(self, data):
msg = gtk.MessageDialog(None, gtk.DIALOG_MODAL, gtk.MESSAGE_INFO,
gtk.BUTTONS_CLOSE, data)
msg.set_resizable(1)
msg.set_title(self.dialog_title)
self.img.set_from_file(self.sun_icon)
msg.set_image(self.img)
msg.show_all()
msg.run()
msg.destroy() | Function to display messages to the user |
def titles(self, key, value):
if not key.startswith('245'):
return {
'source': value.get('9'),
'subtitle': value.get('b'),
'title': value.get('a'),
}
self.setdefault('titles', []).insert(0, {
'source': value.get('9'),
'subtitle': value.get('b'),
'title': value.get('a'),
}) | Populate the ``titles`` key. |
def create_default_links(self):
self._plm.manage_aldb_record(0x40, 0xe2, 0x00, self.address,
self.cat, self.subcat, self.product_key)
self.manage_aldb_record(0x41, 0xa2, 0x00, self._plm.address,
self._plm.cat, self._plm.subcat,
self._plm.product_key)
for link in self._stateList:
state = self._stateList[link]
if state.is_responder:
self._plm.manage_aldb_record(0x40, 0xe2, link, self._address,
0x00, 0x00, 0x00)
self.manage_aldb_record(0x41, 0xa2, link, self._plm.address,
state.linkdata1, state.linkdata2,
state.linkdata3)
if state.is_controller:
self._plm.manage_aldb_record(0x41, 0xa2, link, self._address,
0x00, 0x00, 0x00)
self.manage_aldb_record(0x40, 0xe2, link, self._plm.address,
0x00, 0x00, 0x00)
self.read_aldb() | Create the default links between the IM and the device. |
def check_decade_apostrophes_long(text):
err = "dates_times.dates"
msg = u"Apostrophes aren't needed for decades."
regex = r"\d\d\d0's"
return existence_check(text, [regex], err, msg) | Check the text for dates of the form XXX0's. |
def register(self):
if not self.registered:
self.registered = True
if self.parent:
self.parent.register(self) | Proxy method to register the device with the parent. |
def handle_get_request(self, environ, start_response):
connections = [
s.strip()
for s in environ.get('HTTP_CONNECTION', '').lower().split(',')]
transport = environ.get('HTTP_UPGRADE', '').lower()
if 'upgrade' in connections and transport in self.upgrade_protocols:
self.server.logger.info('%s: Received request to upgrade to %s',
self.sid, transport)
return getattr(self, '_upgrade_' + transport)(environ,
start_response)
try:
packets = self.poll()
except exceptions.QueueEmpty:
exc = sys.exc_info()
self.close(wait=False)
six.reraise(*exc)
return packets | Handle a long-polling GET request from the client. |
def apply_link_ref(offset: int, length: int, value: bytes, bytecode: bytes) -> bytes:
try:
validate_empty_bytes(offset, length, bytecode)
except ValidationError:
raise BytecodeLinkingError("Link references cannot be applied to bytecode")
new_bytes = (
bytecode[:offset]
+ value
+ bytecode[offset + length :]
)
return new_bytes | Returns the new bytecode with `value` put into the location indicated by `offset` and `length`. |
def _get_no_rowscols(self, bbox):
if bbox is None:
return 1, 1
else:
(bb_top, bb_left), (bb_bottom, bb_right) = bbox
if bb_top is None:
bb_top = 0
if bb_left is None:
bb_left = 0
if bb_bottom is None:
bb_bottom = self.grid.code_array.shape[0] - 1
if bb_right is None:
bb_right = self.grid.code_array.shape[1] - 1
return bb_bottom - bb_top + 1, bb_right - bb_left + 1 | Returns tuple of number of rows and cols from bbox |
def _eval(self, node):
if isinstance(node,HTOp):
feed_dict=dict((t,self._cache[t.name]) for t in node.inputs)
node.run(feed_dict)
else:
if isinstance(node,tf.Tensor):
result=self.session.run(node,self._cache)
self._cache[node.name]=result
else:
if node.type =='Assign' or node.type == 'AssignAdd' or node.type == 'AssignSub':
self.session.run(node,self._original_feed_dict) | node is a TensorFlow Op or Tensor from self._exe_order |
def expired(self):
self._data["_killed"] = True
self.save()
raise SessionExpired(self._config.expired_message) | Called when an expired session is accessed |
def inverted(self):
return Instance(input=self.output, output=self.input,
annotated_input=self.annotated_output,
annotated_output=self.annotated_input,
alt_inputs=self.alt_outputs,
alt_outputs=self.alt_inputs,
source=self.source) | Return a version of this instance with inputs replaced by outputs and vice versa. |
def construct_context(self, request):
opts = self.model._meta
app_label = opts.app_label
object_name = opts.object_name.lower()
form = self.construct_form(request)
media = self.media(form)
context = {
'user': request.user,
'title': '%s %s' % (self.label, opts.verbose_name_plural.lower()),
'tool': self,
'opts': opts,
'app_label': app_label,
'media': media,
'form': form,
'changelist_url': reverse('admin:%s_%s_changelist' % (
app_label, object_name
))
}
if hasattr(form, 'fieldsets'):
admin_form = helpers.AdminForm(form, form.fieldsets, {})
context['adminform'] = admin_form
return context | Builds context with various required variables. |
def replace_attribute(module_name, attribute_name, new_value, dry_run=True):
init_file = '%s/__init__.py' % module_name
_, tmp_file = tempfile.mkstemp()
with open(init_file) as input_file:
with open(tmp_file, 'w') as output_file:
for line in input_file:
if line.startswith(attribute_name):
line = "%s = '%s'\n" % (attribute_name, new_value)
output_file.write(line)
if not dry_run:
Path(tmp_file).copy(init_file)
else:
log.info(diff(tmp_file, init_file, retcode=None)) | Update a metadata attribute |
def _show_traceback(method):
def m(self, *args, **kwargs):
try:
return(method(self, *args, **kwargs))
except Exception as e:
ip = get_ipython()
if ip is None:
self.log.warning("Exception in widget method %s: %s", method, e, exc_info=True)
else:
ip.showtraceback()
return m | decorator for showing tracebacks in IPython |
def update_queue(self):
self.logger.debug("Updating queue display")
queue_display = []
for i in range(self.queue_display):
try:
if len(self.queue[i][1]) > 40:
songname = self.queue[i][1][:37] + "..."
else:
songname = self.queue[i][1]
except IndexError:
songname = "---"
queue_display.append("{}. {}\n".format(str(i + 1), songname))
self.queuelog.debug(''.join(queue_display))
self.queuelenlog.debug(str(len(self.queue))) | Updates the queue in the music player |
def user_has_reviewed(obj, user):
ctype = ContentType.objects.get_for_model(obj)
try:
models.Review.objects.get(user=user, content_type=ctype,
object_id=obj.id)
except models.Review.DoesNotExist:
return False
return True | Returns True if the user has already reviewed the object. |
def generate_batch(klass, strategy, size, **kwargs):
return make_factory(klass, **kwargs).generate_batch(strategy, size) | Create a factory for the given class, and generate instances. |
def _init_po_files(target, source, env):
nop = lambda target, source, env: 0
if 'POAUTOINIT' in env:
autoinit = env['POAUTOINIT']
else:
autoinit = False
for tgt in target:
if not tgt.exists():
if autoinit:
action = SCons.Action.Action('$MSGINITCOM', '$MSGINITCOMSTR')
else:
msg = 'File ' + repr(str(tgt)) + ' does not exist. ' \
+ 'If you are a translator, you can create it through: \n' \
+ '$MSGINITCOM'
action = SCons.Action.Action(nop, msg)
status = action([tgt], source, env)
if status: return status
return 0 | Action function for `POInit` builder. |
def record_message(self, msg, from_rewarder):
if self.file:
timestamped_message = {
'timestamp': time.time(),
'message': json.loads(msg),
'from_rewarder': from_rewarder,
}
self.file.write(json.dumps(timestamped_message))
self.file.write('\n')
self.file.flush() | Record a message to our rewards.demo file if it has been opened |
def concatenate_by_line(first, second):
return '\n'.join(x+y for x,y in zip(first.split('\n'), second.split('\n'))) | Zip two strings together, line wise |
def reset_passwd(self, data):
error = False
msg = ""
if len(data["passwd"]) < 6:
error = True
msg = _("Password too short.")
elif data["passwd"] != data["passwd2"]:
error = True
msg = _("Passwords don't match !")
if not error:
passwd_hash = hashlib.sha512(data["passwd"].encode("utf-8")).hexdigest()
user = self.database.users.find_one_and_update({"reset": data["reset_hash"]},
{"$set": {"password": passwd_hash},
"$unset": {"reset": True, "activate": True}})
if user is None:
error = True
msg = _("Invalid reset hash.")
else:
msg = _("Your password has been successfully changed.")
return msg, error | Reset the user password |
def _can_use_numexpr(op, op_str, a, b, dtype_check):
if op_str is not None:
if np.prod(a.shape) > _MIN_ELEMENTS:
dtypes = set()
for o in [a, b]:
if hasattr(o, 'get_dtype_counts'):
s = o.get_dtype_counts()
if len(s) > 1:
return False
dtypes |= set(s.index)
elif isinstance(o, np.ndarray):
dtypes |= {o.dtype.name}
if not len(dtypes) or _ALLOWED_DTYPES[dtype_check] >= dtypes:
return True
return False | return a boolean if we WILL be using numexpr |
def cycles_engine(**kwargs):
logging.info("cycles_engine:")
logging.info("Not ready for production")
experiments = kwargs["experiments"]
farms = []
barn = "raw_dir"
for experiment in experiments:
farms.append([])
if experiment.all_in_memory:
logging.debug("all in memory")
for key in experiment.cell_data_frames:
logging.debug(f"extracting cycles from {key}")
else:
logging.debug("dont have it in memory - need to lookup in the files")
for key in experiment.cell_data_frames:
logging.debug(f"looking up cellpyfile for {key}")
return farms, barn | engine to extract cycles |
def draw_mathtext(self, gc, x, y, s, prop, angle):
if __debug__: verbose.report('RendererAgg.draw_mathtext',
'debug-annoying')
ox, oy, width, height, descent, font_image, used_characters = \
self.mathtext_parser.parse(s, self.dpi, prop)
xd = descent * np.sin(np.deg2rad(angle))
yd = descent * np.cos(np.deg2rad(angle))
x = np.round(x + ox + xd)
y = np.round(y - oy + yd)
self._renderer.draw_text_image(font_image, x, y + 1, angle, gc) | Draw the math text using matplotlib.mathtext |
def git_status_all_repos(cat, hard=True, origin=False, clean=True):
log = cat.log
log.debug("gitter.git_status_all_repos()")
all_repos = cat.PATHS.get_all_repo_folders()
for repo_name in all_repos:
log.info("Repo in: '{}'".format(repo_name))
sha_beg = get_sha(repo_name)
log.debug("Current SHA: '{}'".format(sha_beg))
log.info("Fetching")
fetch(repo_name, log=cat.log)
git_comm = ["git", "status"]
_call_command_in_repo(
git_comm, repo_name, cat.log, fail=True, log_flag=True)
sha_end = get_sha(repo_name)
if sha_end != sha_beg:
log.info("Updated SHA: '{}'".format(sha_end))
return | Perform a 'git status' in each data repository. |
def pidfile(self):
return os.path.abspath(
os.path.expandvars(
os.path.expanduser(
self._pidfile,
),
),
) | Get the absolute path of the pidfile. |
def optimize(qs, info_dict, field_map):
fields = collect_fields(info_dict)
for field in fields:
if field in field_map:
field_name, opt = field_map[field]
qs = (qs.prefetch_related(field_name)
if opt == "prefetch" else qs.select_related(field_name))
return qs | Add either select_related or prefetch_related to fields of the qs |
def filters(self):
params = {}
for _filter in self._filters:
params.update(_filter)
return params | Returns a merged dictionary of filters. |
def write_err(self, text):
stderr = self.stderr
if self.stderr.closed:
stderr = sys.stderr
stderr.write(decode_output(u"\r", target_stream=stderr))
stderr.write(decode_output(CLEAR_LINE, target_stream=stderr))
if text is None:
text = ""
text = decode_output(u"{0}\n".format(text), target_stream=stderr)
self.stderr.write(text)
self.out_buff.write(decode_output(text, target_stream=self.out_buff)) | Write error text in the terminal without breaking the spinner. |
def fielddefsql_from_fieldspec(fieldspec: FIELDSPEC_TYPE) -> str:
sql = fieldspec["name"] + " " + fieldspec["sqltype"]
if "notnull" in fieldspec and fieldspec["notnull"]:
sql += " NOT NULL"
if "autoincrement" in fieldspec and fieldspec["autoincrement"]:
sql += " AUTO_INCREMENT"
if "pk" in fieldspec and fieldspec["pk"]:
sql += " PRIMARY KEY"
else:
if "unique" in fieldspec and fieldspec["unique"]:
sql += " UNIQUE"
if "comment" in fieldspec:
sql += " COMMENT " + sql_quote_string(fieldspec["comment"])
return sql | Returns SQL fragment to define a field. |
def error(self, message: str) -> None:
if len(self._custom_error_message) > 0:
message = self._custom_error_message
self._custom_error_message = ''
lines = message.split('\n')
linum = 0
formatted_message = ''
for line in lines:
if linum == 0:
formatted_message = 'Error: ' + line
else:
formatted_message += '\n ' + line
linum += 1
sys.stderr.write(Fore.LIGHTRED_EX + '{}\n\n'.format(formatted_message) + Fore.RESET)
self.print_help()
sys.exit(1) | Custom error override. Allows application to control the error being displayed by argparse |
def do_template(self,args):
parser = CommandArgumentParser("template")
args = vars(parser.parse_args(args))
print "reading template for stack."
rawStack = self.wrappedStack['rawStack']
template = AwsConnectionFactory.getCfClient().get_template(StackName=rawStack.name)
print template['TemplateBody'] | Print the template for the current stack. template -h for detailed help |
def create_callback(self):
def __callback(question_posed):
logger.debug("%s: asked to choose between: %s" % (self._name, self._qstr(question_posed)))
if self._upto == len(self._data):
logger.error("%s: out of automation data, requested to pick between %s" % (self._name, self._qstr(question_posed)))
raise AutomationException("out of automation data")
question_archived, answer = self._data[self._upto]
if question_archived != question_posed:
logger.error("%s: automation data mismatch, expected question `%s', got question `%s'" % (self._name, self._qstr(question_archived), self._qstr(question_posed)))
resp = question_posed.index(answer)
self._upto += 1
return resp
return __callback | create a callback, suitable to be passed to SenateCounter |
def _coerce_method(converter):
def wrapper(self):
if len(self) == 1:
return converter(self.iloc[0])
raise TypeError("cannot convert the series to "
"{0}".format(str(converter)))
wrapper.__name__ = "__{name}__".format(name=converter.__name__)
return wrapper | Install the scalar coercion methods. |
def longcount_generator(baktun, katun, tun, uinal, kin):
j = to_jd(baktun, katun, tun, uinal, kin)
while True:
yield from_jd(j)
j = j + 1 | Generate long counts, starting with input |
def _unsafe_update_server(self, disabled=False):
id = self.network.id
net = model.Network.from_neutron(self.network)
if id not in _networks:
if disabled:
return
_networks[id] = net
_networks[id].create()
elif disabled:
_networks[id].delete()
del _networks[id]
else:
_networks[id].update(net)
_networks[id] = net | Update server with latest network configuration. |
def csv_tolist(path_to_file, **kwargs):
result = []
encoding = kwargs.get('encoding', 'utf-8')
delimiter = kwargs.get('delimiter', ',')
dialect = kwargs.get('dialect', csv.excel)
_, _ext = path_to_file.split('.', 1)
try:
file = codecs.open(path_to_file, 'r', encoding)
items_file = io.TextIOWrapper(file, encoding=encoding)
result = list(
csv.reader(items_file, delimiter=delimiter, dialect=dialect))
items_file.close()
file.close()
except Exception as ex:
result = []
logger.error('Fail parsing csv to list of rows - {}'.format(ex))
return result | Parse the csv file to a list of rows. |
def interpolate(self, times, proj=PlateCarree()) -> np.ndarray:
if proj not in self.interpolator:
self.interpolator[proj] = interp1d(
np.stack(t.to_pydatetime().timestamp() for t in self.timestamp),
proj.transform_points(
PlateCarree(), *np.stack(self.coords).T
).T,
)
return PlateCarree().transform_points(
proj, *self.interpolator[proj](times)
) | Interpolates a trajectory in time. |
def _check_valid_translation(self, translation):
if not isinstance(translation, np.ndarray) or not np.issubdtype(translation.dtype, np.number):
raise ValueError('Translation must be specified as numeric numpy array')
t = translation.squeeze()
if len(t.shape) != 1 or t.shape[0] != 3:
raise ValueError('Translation must be specified as a 3-vector, 3x1 ndarray, or 1x3 ndarray') | Checks that the translation vector is valid. |
def send_capabilities_request(self, vehicle, name, m):
capability_msg = vehicle.message_factory.command_long_encode(0, 0, mavutil.mavlink.MAV_CMD_REQUEST_AUTOPILOT_CAPABILITIES, 0, 1, 0, 0, 0, 0, 0, 0)
vehicle.send_mavlink(capability_msg) | Request an AUTOPILOT_VERSION packet |
def genms(self, scans=[]):
if len(scans):
scanstr = string.join([str(ss) for ss in sorted(scans)], ',')
else:
scanstr = self.allstr
print 'Splitting out all cal scans (%s) with 1s int time' % scanstr
newname = ps.sdm2ms(self.sdmfile, self.sdmfile.rstrip('/')+'.ms', scanstr, inttime='1')
return newname | Generate an MS that contains all calibrator scans with 1 s integration time. |
def coarse_grain(coarse_grain):
partition(coarse_grain.partition)
if len(coarse_grain.partition) != len(coarse_grain.grouping):
raise ValueError('output and state groupings must be the same size')
for part, group in zip(coarse_grain.partition, coarse_grain.grouping):
if set(range(len(part) + 1)) != set(group[0] + group[1]):
raise ValueError('elements in output grouping {0} do not match '
'elements in state grouping {1}'.format(
part, group)) | Validate a macro coarse-graining. |
def field_default(colx, table_name, tables_dict):
"takes sqparse2.ColX, Table"
if colx.coltp.type.lower() == 'serial':
x = sqparse2.parse('select coalesce(max(%s),-1)+1 from %s' % (colx.name, table_name))
return sqex.run_select(x, tables_dict, Table)[0]
elif colx.not_null: raise NotImplementedError('todo: not_null error')
else: return toliteral(colx.default) | takes sqparse2.ColX, Table |
def write(self, file_or_filename):
if isinstance(file_or_filename, basestring):
file = None
try:
file = open(file_or_filename, "wb")
except Exception, detail:
logger.error("Error opening %s." % detail)
finally:
if file is not None:
self._write_data(file)
file.close()
else:
file = file_or_filename
self._write_data(file)
return file | Writes the case data to file. |
def build_gy(self, dae):
if not self.n:
idx = range(dae.m)
dae.set_jac(Gy, 1e-6, idx, idx)
return
Vn = polar(1.0, dae.y[self.a])
Vc = mul(dae.y[self.v], Vn)
Ic = self.Y * Vc
diagVn = spdiag(Vn)
diagVc = spdiag(Vc)
diagIc = spdiag(Ic)
dS = self.Y * diagVn
dS = diagVc * conj(dS)
dS += conj(diagIc) * diagVn
dR = diagIc
dR -= self.Y * diagVc
dR = diagVc.H.T * dR
self.gy_store = sparse([[dR.imag(), dR.real()], [dS.real(),
dS.imag()]])
return self.gy_store | Build line Jacobian matrix |
def visit_Print(self, node):
self.prints_used[(node.lineno, node.col_offset)] = VIOLATIONS["found"][PRINT_FUNCTION_NAME] | Only exists in python 2. |
def _get_hdds(vm_):
_hdds = config.get_cloud_config_value(
'hdds', vm_, __opts__, default=None,
search_global=False
)
hdds = []
for hdd in _hdds:
hdds.append(
Hdd(
size=hdd['size'],
is_main=hdd['is_main']
)
)
return hdds | Construct VM hdds from cloud profile config |
def rpy(self):
x, y, z, w = self.x, self.y, self.z, self.w
roll = math.atan2(2*y*w - 2*x*z, 1 - 2*y*y - 2*z*z)
pitch = math.atan2(2*x*w - 2*y*z, 1 - 2*x*x - 2*z*z)
yaw = math.asin(2*x*y + 2*z*w)
return (roll, pitch, yaw) | Calculates the Roll, Pitch and Yaw of the Quaternion. |
def namedb_get_last_name_import(cur, name, block_id, vtxindex):
query = 'SELECT history_data FROM history WHERE history_id = ? AND (block_id < ? OR (block_id = ? AND vtxindex < ?)) ' + \
'ORDER BY block_id DESC,vtxindex DESC LIMIT 1;'
args = (name, block_id, block_id, vtxindex)
history_rows = namedb_query_execute(cur, query, args)
for row in history_rows:
history_data = json.loads(row['history_data'])
return history_data
return None | Find the last name import for this name |
def list():
fields = [
('Name', 'name'),
('ID', 'id'),
('Owner', 'is_owner'),
('Permission', 'permission'),
]
with Session() as session:
try:
resp = session.VFolder.list()
if not resp:
print('There is no virtual folders created yet.')
return
rows = (tuple(vf[key] for _, key in fields) for vf in resp)
hdrs = (display_name for display_name, _ in fields)
print(tabulate(rows, hdrs))
except Exception as e:
print_error(e)
sys.exit(1) | List virtual folders that belongs to the current user. |
def reopen_last_project(self):
current_project_path = self.get_option('current_project_path',
default=None)
if current_project_path and \
self.is_valid_project(current_project_path):
self.open_project(path=current_project_path,
restart_consoles=False,
save_previous_files=False)
self.load_config() | Reopen the active project when Spyder was closed last time, if any |
def parse_fallback(self):
if self.strict:
raise PywavefrontException("Unimplemented OBJ format statement '%s' on line '%s'"
% (self.values[0], self.line.rstrip()))
else:
logger.warning("Unimplemented OBJ format statement '%s' on line '%s'"
% (self.values[0], self.line.rstrip())) | Fallback method when parser doesn't know the statement |
def update_domain_base_path_mapping(self, domain_name, lambda_name, stage, base_path):
api_id = self.get_api_id(lambda_name)
if not api_id:
print("Warning! Can't update base path mapping!")
return
base_path_mappings = self.apigateway_client.get_base_path_mappings(domainName=domain_name)
found = False
for base_path_mapping in base_path_mappings.get('items', []):
if base_path_mapping['restApiId'] == api_id and base_path_mapping['stage'] == stage:
found = True
if base_path_mapping['basePath'] != base_path:
self.apigateway_client.update_base_path_mapping(domainName=domain_name,
basePath=base_path_mapping['basePath'],
patchOperations=[
{"op" : "replace",
"path" : "/basePath",
"value" : '' if base_path is None else base_path}
])
if not found:
self.apigateway_client.create_base_path_mapping(
domainName=domain_name,
basePath='' if base_path is None else base_path,
restApiId=api_id,
stage=stage
) | Update domain base path mapping on API Gateway if it was changed |
def contamination_detection(self):
self.qualityobject = quality.Quality(self)
self.qualityobject.contamination_finder(input_path=self.sequencepath,
report_path=self.reportpath) | Calculate the levels of contamination in the reads |
def socket(self, blocking=True):
if self._socket_lock.acquire(blocking):
try:
yield self._socket
finally:
self._socket_lock.release() | Blockingly yield the socket |
def _rename_coords(ds, attrs):
for name_int, names_ext in attrs.items():
ds_coord_name = set(names_ext).intersection(set(ds.coords))
if ds_coord_name:
try:
ds = ds.rename({list(ds_coord_name)[0]: name_int})
logging.debug("Rename coord from `{0}` to `{1}` for "
"Dataset `{2}`".format(ds_coord_name,
name_int, ds))
except ValueError:
ds = ds
return ds | Rename coordinates to aospy's internal names. |
def create_fpath_dir(self, fpath: str):
os.makedirs(os.path.dirname(fpath), exist_ok=True) | Creates directory for fpath. |
def open_recruitment(self, n=1):
logger.info("Opening Bot recruitment for {} participants".format(n))
factory = self._get_bot_factory()
bot_class_name = factory("", "", "").__class__.__name__
return {
"items": self.recruit(n),
"message": "Bot recruitment started using {}".format(bot_class_name),
} | Start recruiting right away. |
def as_text(self, is_pretty=False):
values = {
'type': self._type,
self.SERVICE_ENDPOINT: self._service_endpoint,
}
if self._consume_endpoint is not None:
values[self.CONSUME_ENDPOINT] = self._consume_endpoint
if self._values:
for name, value in self._values.items():
values[name] = value
if is_pretty:
return json.dumps(values, indent=4, separators=(',', ': '))
return json.dumps(values) | Return the service as a JSON string. |
def expand(self, line, do_expand, force=False, vislevels=0, level=-1):
lastchild = self.GetLastChild(line, level)
line += 1
while line <= lastchild:
if force:
if vislevels > 0:
self.ShowLines(line, line)
else:
self.HideLines(line, line)
elif do_expand:
self.ShowLines(line, line)
if level == -1:
level = self.GetFoldLevel(line)
if level & stc.STC_FOLDLEVELHEADERFLAG:
if force:
self.SetFoldExpanded(line, vislevels - 1)
line = self.expand(line, do_expand, force, vislevels - 1)
else:
expandsub = do_expand and self.GetFoldExpanded(line)
line = self.expand(line, expandsub, force, vislevels - 1)
else:
line += 1
return line | Multi-purpose expand method from original STC class |
def base_path(self):
path = UrlPath()
parent = self.parent
while parent:
path_prefix = getattr(parent, 'path_prefix', NoPath)
path = path_prefix + path
parent = getattr(parent, 'parent', None)
return path | Calculate the APIs base path |
def _dump_cfg(cfg_file):
if __salt__['file.file_exists'](cfg_file):
with salt.utils.files.fopen(cfg_file, 'r') as fp_:
log.debug(
"zonecfg - configuration file:\n%s",
"".join(salt.utils.data.decode(fp_.readlines()))
) | Internal helper for debugging cfg files |
def tag_helper(tag, items, locked=True, remove=False):
if not isinstance(items, list):
items = [items]
data = {}
if not remove:
for i, item in enumerate(items):
tagname = '%s[%s].tag.tag' % (tag, i)
data[tagname] = item
if remove:
tagname = '%s[].tag.tag-' % tag
data[tagname] = ','.join(items)
data['%s.locked' % tag] = 1 if locked else 0
return data | Simple tag helper for editing a object. |
def _filter_from_dict(current: Dict[str, Any]) -> Dict[str, Any]:
filter_ = dict()
for k, v in current.items():
if isinstance(v, dict):
for sub, v2 in _filter_from_dict(v).items():
filter_[f'{k}.{sub}'] = v2
else:
filter_[k] = v
return filter_ | Takes in a nested dictionary as a filter and returns a flattened filter dictionary |
def flatten_container(self, container):
for names in ARG_MAP.values():
if names[TransformationTypes.KUBERNETES.value]['name'] and \
'.' in names[TransformationTypes.KUBERNETES.value]['name']:
kubernetes_dotted_name = names[TransformationTypes.KUBERNETES.value]['name']
parts = kubernetes_dotted_name.split('.')
result = lookup_nested_dict(container, *parts)
if result:
container[kubernetes_dotted_name] = result
return container | Accepts a kubernetes container and pulls out the nested values into the top level |
def load_psat(cls, fd):
from pylon.io.psat import PSATReader
return PSATReader().read(fd) | Returns a case object from the given PSAT data file. |
def to_string(value, ctx):
if isinstance(value, bool):
return "TRUE" if value else "FALSE"
elif isinstance(value, int):
return str(value)
elif isinstance(value, Decimal):
return format_decimal(value)
elif isinstance(value, str):
return value
elif type(value) == datetime.date:
return value.strftime(ctx.get_date_format(False))
elif isinstance(value, datetime.time):
return value.strftime('%H:%M')
elif isinstance(value, datetime.datetime):
return value.astimezone(ctx.timezone).isoformat()
raise EvaluationError("Can't convert '%s' to a string" % str(value)) | Tries conversion of any value to a string |
def _set_return(self):
if type(self.docs['in']['return']) is list and self.dst.style['out'] not in ['groups', 'numpydoc', 'google']:
lst = self.docs['in']['return']
if lst:
if lst[0][0] is not None:
self.docs['out']['return'] = "%s-> %s" % (lst[0][0], lst[0][1])
else:
self.docs['out']['return'] = lst[0][1]
self.docs['out']['rtype'] = lst[0][2]
else:
self.docs['out']['return'] = self.docs['in']['return']
self.docs['out']['rtype'] = self.docs['in']['rtype'] | Sets the return parameter with description and rtype if any |
def _payload(self):
return self.tcp or self.udp or self.icmpv4 or self.icmpv6 | header that implements PayloadMixin |
def decode_unicode_string(string):
if string.startswith('[BASE64-DATA]') and string.endswith('[/BASE64-DATA]'):
return base64.b64decode(string[len('[BASE64-DATA]'):-len('[/BASE64-DATA]')])
return string | Decode string encoded by `unicode_string` |
def eval_grad(self):
return self.D.T.dot(self.D.dot(self.Y) - self.S) | Compute gradient in spatial domain for variable Y. |
def backend_class(self):
if not self.backend:
return None
if not self.__backend_class:
self.__backend_class = self._get_netengine_backend()
return self.__backend_class | returns python netengine backend class, importing it if needed |
def _get_logs(job_id):
job_id = unicode(job_id)
results = ENGINE.execute(
LOGS_TABLE.select().where(LOGS_TABLE.c.job_id == job_id)).fetchall()
results = [dict(result) for result in results]
for result in results:
result.pop("job_id")
return results | Return any logs for the given job_id from the logs table. |
def _launch_editor(starting_text=''):
"Launch editor, let user write text, then return that text."
editor = os.environ.get('EDITOR', 'vim')
with tempfile.TemporaryDirectory() as dirname:
filename = pathlib.Path(dirname) / 'metadata.yml'
with filename.open(mode='wt') as handle:
handle.write(starting_text)
subprocess.call([editor, filename])
with filename.open(mode='rt') as handle:
text = handle.read()
return text | Launch editor, let user write text, then return that text. |
def contents(self):
rslt = []
for (dpos, dlen, ulen, flag, typcd, nm) in self.toc:
rslt.append(nm)
return rslt | Return the names of the entries |
def number_of_sections_per_neurite(neurites, neurite_type=NeuriteType.all):
return list(sum(1 for _ in n.iter_sections())
for n in iter_neurites(neurites, filt=is_type(neurite_type))) | Get the number of sections per neurite in a collection of neurites |
def _copy_finfo_directory(finfo, out_dir):
out_dir = _get_dir_upload_path(finfo, out_dir)
if not shared.up_to_date(out_dir, finfo):
logger.info("Storing directory in local filesystem: %s" % out_dir)
if os.path.exists(out_dir):
shutil.rmtree(out_dir)
shutil.copytree(finfo["path"], out_dir)
for tmpdir in ["tx", "tmp"]:
if os.path.exists(os.path.join(out_dir, tmpdir)):
shutil.rmtree(os.path.join(out_dir, tmpdir))
os.utime(out_dir, None)
return out_dir | Copy a directory into the final output directory. |
def reqresp_middleware(api=None):
def decorator(middleware_generator):
apply_to_api = hug.API(api) if api else hug.api.from_object(middleware_generator)
class MiddlewareRouter(object):
__slots__ = ('gen', )
def process_request(self, request, response):
self.gen = middleware_generator(request)
return self.gen.__next__()
def process_response(self, request, response, resource, req_succeeded):
return self.gen.send((response, resource))
apply_to_api.http.add_middleware(MiddlewareRouter())
return middleware_generator
return decorator | Registers a middleware function that will be called on every request and response |
def _sendMessage(self, msg):
if not msg:
return
msg = self._collapseMsg(msg)
self.sendStatus(msg) | Collapse and send msg to the master |
def _cache_key_select_analyst(method, self, allow_blank=False, style=None):
key = update_timer(),allow_blank, style
return key | This function returns the key used to decide if method select_analyst has to be recomputed |
def ceil_nearest(x, dx=1):
precision = get_sig_digits(dx)
return round(math.ceil(float(x) / dx) * dx, precision) | ceil a number to within a given rounding accuracy |
def _get_entity_by_name(self, entity_name):
if entity_name in self._registry:
return self._registry[entity_name]
else:
return self._find_entity_in_records_by_class_name(entity_name) | Fetch Entity record with an Entity name |
def find(cls, *args, **kwargs):
return list(cls.collection.find(*args, **kwargs)) | Returns all document dicts that pass the filter |
def _start_new_episode(self):
if self.has_interaction:
self._flush()
self.t = 0
self.has_interaction = False | Bookkeeping to do at the start of each new episode. |
def pick_contiguous_unused_ports(
num_ports,
retry_interval_secs=3,
retry_attempts=5):
for _ in range(retry_attempts):
start_port = portpicker.pick_unused_port()
if start_port is not None:
ports = [start_port + p for p in range(num_ports)]
if all(portpicker.is_port_free(p) for p in ports):
return ports
else:
return_ports(ports)
time.sleep(retry_interval_secs)
raise RuntimeError("Unable to obtain %d contiguous unused ports." % num_ports) | Reserves and returns a list of `num_ports` contiguous unused ports. |
def to_debug_message(
self, text: str, color=None, pos: Optional[Union[Point2, Point3]] = None, size: int = 8
) -> debug_pb.DebugText:
color = self.to_debug_color(color)
pt3d = self.to_debug_point(pos) if isinstance(pos, Point3) else None
virtual_pos = self.to_debug_point(pos) if not isinstance(pos, Point3) else None
return debug_pb.DebugText(color=color, text=text, virtual_pos=virtual_pos, world_pos=pt3d, size=size) | Helper function to create debug texts |
def sync(self, recursive=False):
self.syncTree(recursive=recursive)
self.syncView(recursive=recursive) | Syncs the information from this item to the tree and view. |
def parse_stuple(s,length=2):
if parse_utuple(s, isrx_s, length=length) is None:
raise ValueError("{} is not a valid string tuple.".format(s));
s = quote_subs(s);
return evalt(s); | parse a string of strings. Don't quote strings |
def fail(self, text=u"FAIL", err=False):
self._text = None
_text = text if text else u"FAIL"
err = err or not self.write_to_stdout
self._freeze(_text, err=err) | Set fail finalizer to a spinner. |
def choice_install(self):
pkg_security([self.name])
if not find_package(self.prgnam, self.meta.pkg_path):
self.build()
self.install()
delete(self.build_folder)
raise SystemExit()
else:
self.msg.template(78)
self.msg.pkg_found(self.prgnam)
self.msg.template(78)
raise SystemExit() | Download, build and install package |
def copy_reset(self):
self._filters = []
self._filter_or = False
self._paginate = True
self._paginate_count = 0
self._result_count = None
self._result_limit = 500
self._result_start = 0 | Reset values after instance has been copied |
def subtract_months(self, months: int) -> datetime:
self.value = self.value - relativedelta(months=months)
return self.value | Subtracts a number of months from the current value |
def _submit_to_queue(self, script_file):
if sys.version_info[0] < 3:
process = Popen(['qsub', script_file], stdout=PIPE, stderr=PIPE)
else:
process = Popen(['qsub', script_file], stdout=PIPE, stderr=PIPE, universal_newlines=True)
out, err = process.communicate()
queue_id = None
if process.returncode == 0:
try:
queue_id = int(out.split('.')[0])
except:
logger.critical("Could not parse job id following qsub...")
return SubmitResults(qid=queue_id, out=out, err=err, process=process) | Submit a job script to the queue. |
def process_streamers(self):
in_progress = self._stream_manager.in_progress()
triggered = self.graph.check_streamers(blacklist=in_progress)
for streamer in triggered:
self._stream_manager.process_streamer(streamer, callback=self._handle_streamer_finished) | Check if any streamers should be handed to the stream manager. |
def parse_option(self, option, block_name, *values):
if option.endswith('status'):
status = values[0]
if status not in self.VALID_STATUSES:
raise ValueError(u'Invalid IM status "{0}"'.format(status))
if len(values) > 2:
raise TypeError
if option == 'status':
option = 'start_' + option
key = option.split('_', 1)[0]
self.statuses[key] = values[:2]
elif option == 'status_msg':
if len(values) != 2:
raise TypeError
name, msg = values
self.messages[name] = msg | Parse status, end_status, timer_status and status_msg options. |
def render(self, template_name, **kw):
'Interface method called from `Template.render`'
return self.env.get_template(template_name).render(**kw) | Interface method called from `Template.render` |
def clean_bytes(line):
text = line.decode('utf-8').replace('\r', '').strip('\n')
return re.sub(r'\x1b[^m]*m', '', text).replace("``", "`\u200b`").strip('\n') | Cleans a byte sequence of shell directives and decodes it. |
def application_detail(request, application_id, state=None, label=None):
application = base.get_application(pk=application_id)
state_machine = base.get_state_machine(application)
return state_machine.process(request, application, state, label) | An authenticated user is trying to access an application. |
def _register_service(self):
if (
self._registration is None
and self.specifications
and self.__validated
and self.__controller_on
):
properties = self._ipopo_instance.context.properties.copy()
bundle_context = self._ipopo_instance.bundle_context
self._registration = bundle_context.register_service(
self.specifications,
self._ipopo_instance.instance,
properties,
factory=self.__is_factory,
prototype=self.__is_prototype,
)
self._svc_reference = self._registration.get_reference()
self._ipopo_instance.safe_callback(
ipopo_constants.IPOPO_CALLBACK_POST_REGISTRATION,
self._svc_reference,
) | Registers the provided service, if possible |