code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def move_to_step(self, step):
    """Move the scenario state machine for the current session to *step*.

    Use in cases when you need to jump to a given step depending on input.

    :param step: name of the target step; must be declared in the scenario.
    :raises UndefinedState: when *step* is not part of the scenario.
    :returns: an error statement when the session state machine was never
        initialized; otherwise None.
    """
    if step not in self._scenario_steps.keys():
        raise UndefinedState("step {} not defined in scenario".format(step))
    try:
        # session is presumably a request-scoped context object — TODO confirm.
        session_id = session.sessionId
        self.session_machines.set_state(session_id, step)
    except UninitializedStateMachine as e:
        # No state machine exists for this session; report a generic
        # internal error instead of crashing the handler.
        logger.error(e)
        return statement(INTERNAL_ERROR_MSG)
Use in cases when you need to move in given step depending on input
def correct(self, calib, temp, we_t, ae_t):
    """Compute weC from weT, aeT using the configured compensation algorithm.

    :param calib: calibration record providing we_cal_mv / ae_cal_mv.
    :param temp: temperature used for compensation.
    :param we_t: working-electrode reading.
    :param ae_t: auxiliary-electrode reading.
    :returns: the compensated value, or None when *temp* is out of range.
    :raises ValueError: when the configured algorithm id is unknown.
    """
    if not A4TempComp.in_range(temp):
        return None
    # Dispatch on the algorithm id selected at construction time;
    # equations 2 and 3 additionally use the calibration millivolts.
    if self.__algorithm == 1:
        return self.__eq1(temp, we_t, ae_t)
    if self.__algorithm == 2:
        return self.__eq2(temp, we_t, ae_t, calib.we_cal_mv, calib.ae_cal_mv)
    if self.__algorithm == 3:
        return self.__eq3(temp, we_t, ae_t, calib.we_cal_mv, calib.ae_cal_mv)
    if self.__algorithm == 4:
        return self.__eq4(temp, we_t, calib.we_cal_mv)
    raise ValueError("A4TempComp.conv: unrecognised algorithm: %d." % self.__algorithm)
Compute weC from weT, aeT
def exclude(self, pattern): match = translate_pattern(pattern) return self._remove_files(match.match)
Exclude files that match 'pattern'.
def total_msgs(xml):
    """Count the total number of messages across all elements of *xml*."""
    return sum(len(element.message) for element in xml)
count total number of msgs
def _create_eval_metric_composite(metric_names: List[str]) -> mx.metric.CompositeEvalMetric: metrics = [EarlyStoppingTrainer._create_eval_metric(metric_name) for metric_name in metric_names] return mx.metric.create(metrics)
Creates a composite EvalMetric given a list of metric names.
def strip_head(sequence, values):
    """Strip leading elements of *sequence* that are members of *values*."""
    drop = set(values)
    result = []
    iterator = iter(sequence)
    # Skip the leading run of droppable items, keep the first survivor.
    for item in iterator:
        if item not in drop:
            result.append(item)
            break
    # Everything after the first survivor is kept verbatim.
    result.extend(iterator)
    return result
Strips elements of `values` from the beginning of `sequence`.
def start_inline(self,stylestack=None): self.stack.append('inline') if self.dirty: self.escpos._raw(' ') if stylestack: self.style(stylestack)
starts an inline entity with an optional style definition
def _make_symlink(self, link_name: str, link_target: str):
    """Create a symlink named *link_name* pointing at *link_target*.

    The link is placed in the directory of this session's extra
    resource path; nothing happens when that path is unavailable.
    """
    # Resolve a representative resource path just to locate its directory.
    path = self._file_writer_session.extra_resource_path('dummy')
    if path:
        dir_path = os.path.dirname(path)
        symlink_path = os.path.join(dir_path, link_name)
        # NOTE(review): _logger appears to use {}-style structured
        # formatting (see the keyword-argument info call below) — confirm.
        _logger.debug('symlink {} -> {}', symlink_path, link_target)
        os.symlink(link_target, symlink_path)
        _logger.info(
            _('Created symbolic link {symlink_path} to target {symlink_target}.'),
            symlink_path=symlink_path,
            symlink_target=link_target
        )
Make a symlink on the system.
def require_compatible_version(compatible_version, word="File"): if isinstance(compatible_version, str): compatible_version = parse_version(compatible_version) elif not isinstance(compatible_version, Version): raise ValueError("Type of `compatible_version` not understood.") current_version = parse_version(CURRENT_VERSION) if current_version < compatible_version: raise VersionError("{0} written for version >= {1}, this is {2}.".format( word, str(compatible_version), CURRENT_VERSION ))
Check that compatible version of input data is not too new.
def _coerce_to_ndarray(self): data = self._data.astype(object) data[self._mask] = self._na_value return data
Coerce to an ndarray of object dtype.
def consume_messages(self, batchsize):
    """Yield up to *batchsize* messages from the reservoir.

    Marks the consumer finished when the reservoir is empty; otherwise
    yields one batch and then drops it from the reservoir.
    """
    if not self._reservoir:
        self.finished = True
        return
    batch = self._reservoir[:batchsize]
    for message in batch:
        yield message
    # Trim only after the whole batch has been consumed.
    self._reservoir = self._reservoir[batchsize:]
Get messages batch from the reservoir
def _load_ini(self, namespace, config_file): self.LOG.debug("Loading %r..." % (config_file,)) ini_file = ConfigParser.SafeConfigParser() ini_file.optionxform = str if ini_file.read(config_file): self._set_from_ini(namespace, ini_file) else: self.LOG.warning("Configuration file %r not found," " use the command 'pyroadmin --create-config' to create it!" % (config_file,))
Load INI style configuration.
def fetch_image(self, path, dest, user='root'): self.run('test -f %s || curl -L -s -o %s %s' % (dest, dest, path), user=user, ignore_error=True)
Store in the user home directory an image from a remote location.
def send_if_client(fctn):
    """Decorator: emit the call to the server when the bundle is in
    client mode; otherwise run *fctn* locally and return its result.
    """
    @functools.wraps(fctn)
    def _send_if_client(self, *args, **kwargs):
        # Some client-side method names differ on the server.
        fctn_map = {'set_quantity': 'set_value'}
        b = self._bundle
        if b is not None and b.is_client:
            method = fctn_map.get(fctn.__name__, fctn.__name__)
            # Identify the target object by its filter (or twig).
            # NOTE(review): d aliases self._filter when present, so the
            # writes below mutate that dict — confirm this is intended.
            d = self._filter if hasattr(self, '_filter') \
                else {'twig': self.twig}
            d['bundleid'] = b._bundleid
            for k, v in kwargs.items():
                d[k] = v
            logger.info('emitting to {}({}) to server'.format(method, d))
            b._socketio.emit(method, d)
            # Long-running server jobs: refresh client state instead of
            # returning a value.
            if fctn.__name__ in ['run_compute', 'run_fitting']:
                self._bundle.client_update()
        else:
            return fctn(self, *args, **kwargs)
    return _send_if_client
Intercept and send to the server if bundle is in client mode.
def nested_insert(self, item_list): if len(item_list) == 1: self[item_list[0]] = LIVVDict() elif len(item_list) > 1: if item_list[0] not in self: self[item_list[0]] = LIVVDict() self[item_list[0]].nested_insert(item_list[1:])
Create a series of nested LIVVDicts given a list
def visit_Num(self, node: ast.Num) -> Union[int, float]: result = node.n self.recomputed_values[node] = result return result
Recompute the value as the number at the node.
def _int_size(x): if -0x80 <= x <= 0x7F: return 1 elif -0x8000 <= x <= 0x7FFF: return 2 elif -0x80000000 <= x <= 0x7FFFFFFF: return 4 elif long(-0x8000000000000000) <= x <= long(0x7FFFFFFFFFFFFFFF): return 8 else: raise RuntimeError("Cannot represent value: " + str(x))
Return the smallest size int that can store the value
def update_loci(self): lbc = {} chroms = sorted([x.range.chr for x in self.loci]) for chrom in chroms: lbc[chrom] = Loci() for x in self.loci: lbc[x.range.chr].add_locus(x) for chrom in sorted(lbc.keys()): if self.verbose: lbc[chrom].verbose = True sys.stderr.write(chrom+"\n") lbc[chrom].overhang = self.overhang lbc[chrom].use_direction = self.use_direction lbc[chrom].merge_down_loci() self.loci = [] for chrom in sorted(lbc.keys()): for locus in lbc[chrom].loci: self.loci.append(locus)
Goes through and combines loci until we have one set meeting our overlap definition
def _Connect(host=None, port=None, user=None, password=None, database=None,
             client_key_path=None, client_cert_path=None, ca_cert_path=None):
    """Connect to MySQL and check that the server fulfills requirements.

    Builds the connection kwargs (including optional SSL client/CA
    material), opens the connection, then runs a series of checks and
    session-variable adjustments on a temporary cursor.

    :returns: an open MySQLdb connection.
    """
    connection_args = _GetConnectionArgs(
        host=host, port=port, user=user, password=password, database=database,
        client_key_path=client_key_path, client_cert_path=client_cert_path,
        ca_cert_path=ca_cert_path)
    conn = MySQLdb.Connect(**connection_args)
    # The cursor is needed only for the checks below; close it promptly.
    with contextlib.closing(conn.cursor()) as cursor:
        _CheckForSSL(cursor)
        _SetMariaDBMode(cursor)
        _SetSqlMode(cursor)
        _SetBinlogFormat(cursor)
        _SetPacketSizeForFollowingConnections(cursor)
        _SetEncoding(cursor)
        _CheckConnectionEncoding(cursor)
        _CheckLogFileSize(cursor)
    return conn
Connect to MySQL and check if server fulfills requirements.
def _make_table(values): import collections table = collections.deque() start, end = values[0], values[0] for num, value in enumerate(values): if num == 0: table.append((value, value,)) continue start, end = table.pop() if end == value - 1: table.append((start, value,)) else: table.append((start, end,)) table.append((value, value,)) return tuple(table)
Return a tuple of lookup tables for given values.
def print_item_callback(item): print('&listen [{}, {}={}]'.format( item.get('cmd', ''), item.get('id', ''), item.get('data', '')))
Print an item callback, used by &listen.
def league_scores(self, total_data, time, show_datetime, use_12_hour_format): for match in total_data['matches']: self.scores(self.parse_result(match), add_new_line=not show_datetime) if show_datetime: click.secho(' %s' % Stdout.utc_to_local(match["utcDate"], use_12_hour_format, show_datetime), fg=self.colors.TIME) click.echo()
Prints the data in a pretty format
def _get_placeholder_arg(arg_name, placeholder): if placeholder is None: raise RuntimeWarning(u"placeholder object is None") elif isinstance(placeholder, Placeholder): return placeholder elif isinstance(placeholder, Manager): manager = placeholder try: parent_object = manager.instance except AttributeError: parent_object = None try: placeholder = manager.all()[0] if parent_object is not None: placeholder.parent = parent_object return placeholder except IndexError: raise RuntimeWarning(u"No placeholders found for query '{0}.all.0'".format(arg_name)) else: raise ValueError(u"The field '{0}' does not refer to a placeholder object!".format(arg_name))
Validate and return the Placeholder object that the template variable points to.
def visit_Name(self, node):
    """Record the id of any Name node in Store or Param context.

    Such nodes introduce new identifiers (assignments and parameters).
    """
    context = node.ctx
    if isinstance(context, ast.Store) or isinstance(context, ast.Param):
        self.result.add(node.id)
Any node with Store or Param context is a new identifier.
def output_vhost(gandi, vhost, paas, output_keys, justify=14): output_generic(gandi, vhost, output_keys, justify) if 'paas_name' in output_keys: output_line(gandi, 'paas_name', paas, justify)
Helper to output a vhost information.
def _initialize_rest(self): if self._submit_context is None: raise ValueError("View has not been created.") job = self._submit_context._job_access() self._view_object = job.get_views(name=self.name)[0]
Used to initialize the View object on first use.
def __get_location(conn, vm_): location = config.get_cloud_config_value( 'location', vm_, __opts__) return conn.ex_get_zone(location)
Need to override libcloud to find the zone.
def send_short_lpp_packet(self, dest_id, data): pk = CRTPPacket() pk.port = CRTPPort.LOCALIZATION pk.channel = self.GENERIC_CH pk.data = struct.pack('<BB', self.LPS_SHORT_LPP_PACKET, dest_id) + data self._cf.send_packet(pk)
Send ultra-wide-band LPP packet to dest_id
def setup_network_agents(self): for i in self.env.G.nodes(): self.env.G.node[i]['agent'] = self.agent_type(environment=self.env, agent_id=i, state=deepcopy(self.initial_states[i]))
Initializes agents on nodes of graph and registers them to the SimPy environment
def acp_users_import(): if not current_user.is_admin: return error("Not authorized to edit users.", 401) if not db: return error('The ACP is not available in single-user mode.', 500) form = UserImportForm() if not form.validate(): return error("Bad Request", 400) fh = request.files['tsv'].stream tsv = fh.read() return tsv
Import users from a TSV file.
def _execute_cmd(plugin, args='', run_type='cmd.retcode'): data = {} all_plugins = list_plugins() if plugin in all_plugins: data = __salt__[run_type]( '{0}{1} {2}'.format(PLUGINDIR, plugin, args), python_shell=False) return data
Execute nagios plugin if it's in the directory with salt command specified in run_type
def _initialize_generator(self,gen,obj=None): if hasattr(obj,"_Dynamic_time_fn"): gen._Dynamic_time_fn = obj._Dynamic_time_fn gen._Dynamic_last = None gen._Dynamic_time = -1 gen._saved_Dynamic_last = [] gen._saved_Dynamic_time = []
Add 'last time' and 'last value' attributes to the generator.
def main(argv): if flags.FLAGS.version: print("GRR server {}".format(config_server.VERSION["packageversion"])) return if not flags.FLAGS.component: raise ValueError("Need to specify which component to start.") if flags.FLAGS.component.startswith("worker"): worker.main([argv]) elif flags.FLAGS.component.startswith("frontend"): frontend.main([argv]) elif flags.FLAGS.component.startswith("admin_ui"): admin_ui.main([argv]) else: raise ValueError("No valid component specified. Got: " "%s." % flags.FLAGS.component)
Sets up all the components in their own threads.
def _convert_to_clusters(c): new_dict = {} n_cluster = 0 logger.debug("_convert_to_cluster: loci %s" % c.loci2seq.keys()) for idl in c.loci2seq: n_cluster += 1 new_c = cluster(n_cluster) new_c.loci2seq[idl] = c.loci2seq[idl] new_dict[n_cluster] = new_c logger.debug("_convert_to_cluster: new ids %s" % new_dict.keys()) return new_dict
Return 1 cluster per loci
def system(*args, **kwargs):
    """Run the given command and return its decoded stdout.

    stdout is captured by default; extra kwargs pass through to
    subprocess.Popen.  Exits the process (SystemExit) with the
    command's return code on failure.
    """
    kwargs.setdefault('stdout', subprocess.PIPE)
    process = subprocess.Popen(args, **kwargs)
    captured, _ = process.communicate()
    code = process.returncode
    if code:
        raise SystemExit(code)
    return captured.decode('utf-8')
Execute the given bash command
def lookup_field_class(self, field, obj=None, default=None):
    """Return any additional CSS class for rendering *field*.

    An explicit 'class' entry in field_config wins, then *default*,
    otherwise the empty string.
    """
    configured = field in self.field_config and 'class' in self.field_config[field]
    if configured:
        return self.field_config[field]['class']
    return default if default else ""
Looks up any additional class we should include when rendering this field
def line_to_offset(self, line, column):
    """Convert 1-based *line* and 0-based *column* to a 0-based offset.

    Out-of-range positions clamp to the start or end of the text.
    """
    index = line - 1
    if index < 0:
        return 0
    if index >= len(self._line_offsets):
        return self._text_len
    # Negative columns clamp to the line start; the result never
    # exceeds the text length.
    offset = self._line_offsets[index] + max(0, column)
    return min(offset, self._text_len)
Converts 1-based line number and 0-based column to 0-based character offset into text.
def ping(self): self.last_ping = time.time() try: self.send_message({MESSAGE_TYPE: TYPE_PING}) except NotConnected: self._socket_client.logger.error("Chromecast is disconnected. " + "Cannot ping until reconnected.")
Send a ping message.
def linterp(self, setx, sety, x):
    """Linearly interpolate model data values between two time steps.

    Returns NaN when the first bracketing sample (x or y) is NaN.
    """
    x0, x1 = setx[0], setx[1]
    y0, y1 = sety[0], sety[1]
    if math.isnan(y0) or math.isnan(x0):
        return np.nan
    slope = (y1 - y0) / (x1 - x0)
    return y0 + (x - x0) * slope
Linear interp of model data values between time steps
def preparse(output_format): try: return templating.preparse(output_format, lambda path: os.path.join(config.config_dir, "templates", path)) except ImportError as exc: if "tempita" in str(exc): raise error.UserError("To be able to use Tempita templates, install the 'tempita' package (%s)\n" " Possibly USING THE FOLLOWING COMMAND:\n" " %s/easy_install tempita" % (exc, os.path.dirname(sys.executable))) raise except IOError as exc: raise error.LoggableError("Cannot read template: {}".format(exc))
Do any special processing of a template, and return the result.
def translate_dict(cls, val): escaped = ', '.join( ["{} -> {}".format(cls.translate_str(k), cls.translate(v)) for k, v in val.items()] ) return 'Map({})'.format(escaped)
Translate dicts to scala Maps
def create_session(username, password): user = User.objects.get_user_by_password(username, password) auth_session_engine = get_config('auth_session_engine') if not user: raise InvalidInput('Username or password incorrect') session_key = random_string(15) while auth_session_engine.get(session_key): session_key = random_string(15) auth_session_engine.set(session_key, user.username, get_config('auth_session_expire')) return {'session_key': session_key, 'user': user}
Create a session for the user, and then return the key.
def from_timedelta(cls, timedelta): from math import ceil units = ceil(timedelta.total_seconds() / cls.time_unit) return cls.create(units)
expects a datetime.timedelta object
def cache_file(self, local_path, ip, path): destination = self.__destination(ip, path) atomicish_move(local_path, destination)
Move a file from a temporary staging area into the cache.
def flush(self):
    """Flush every underlying file, fsyncing those backed by an fd."""
    for stream in self.files:
        stream.flush()
        # os.fsync accepts raw fds and objects exposing fileno().
        if isinstance(stream, int) or hasattr(stream, "fileno"):
            try:
                os.fsync(stream)
            except OSError:
                # Best effort: pipes, ttys and in-memory streams
                # don't support fsync.
                pass
flushes all file contents to disc
def _clean_dict(target_dict, whitelist=None): assert isinstance(target_dict, dict) return { ustr(k).strip(): ustr(v).strip() for k, v in target_dict.items() if v not in (None, Ellipsis, [], (), "") and (not whitelist or k in whitelist) }
Convenience function that removes a dicts keys that have falsy values
def match(self, query):
    """Try to find a match for *query* in the channel commands.

    Dialogue matches are tried first, then getters; setters are the
    final fallback.  When the device cannot select channels, only the
    default tables are consulted; otherwise every channel id is tried.

    :returns: the matched response, or None when nothing matches.
    """
    if not self.can_select:
        # Selection is fixed by the instrument: honour the currently
        # selected channel if we recognise it, otherwise give up.
        ch_id = self._device._properties['selected_channel'].get_value()
        if ch_id in self._ids:
            self._selected = ch_id
        else:
            return
        response = self._match_dialog(query, self._dialogues['__default__'])
        if response is not None:
            return response
        response = self._match_getters(query, self._getters['__default__'])
        if response is not None:
            return response
    else:
        # Selectable channels: try each channel's tables in turn,
        # remembering which channel produced the match.
        for ch_id in self._ids:
            self._selected = ch_id
            response = self._match_dialog(query, self._dialogues[ch_id])
            if response is not None:
                return response
            response = self._match_getters(query, self._getters[ch_id])
            if response is not None:
                return response
    return self._match_setters(query)
Try to find a match for a query in the channel commands.
def timers(self, filename=None, mpi_rank="0"):
    """Return the timers for *filename* and MPI rank *mpi_rank*.

    When *filename* is None, the timers of every known filename are
    returned, in filename registration order.
    """
    if filename is None:
        return [self._timers[fname][mpi_rank] for fname in self._filenames]
    return [self._timers[filename][mpi_rank]]
Return the list of timers associated to the given `filename` and MPI rank mpi_rank.
def info_file(self): sources = SBoGrep(self.prgnam).source().split() for source in sources: self.sbo_sources.append(source.split("/")[-1])
Grab sources from .info file and store filename
def _reindex(self): self.index = [] for path in self.paths: target_path = os.path.normpath(os.path.join(BASE_PATH, path)) for root, subdirs, files in os.walk(target_path): for f in files: self.index.append( (os.path.join(root, f).lower(), os.path.join(root, f)))
Create a case-insensitive index of the paths
def _assert_ssl_exc_contains(exc, *msgs): if len(msgs) < 1: raise TypeError( '_assert_ssl_exc_contains() requires ' 'at least one message to be passed.', ) err_msg_lower = str(exc).lower() return any(m.lower() in err_msg_lower for m in msgs)
Check whether SSL exception contains either of messages provided.
def userdata_template(opts, vm_, userdata):
    """Use the configured templating engine to template the userdata file.

    :param opts: master/minion options dictionary.
    :param vm_: VM profile configuration.
    :param userdata: raw userdata string, or None.
    :returns: the templated userdata (coerced to a string), or the input
        unchanged when templating is disabled or unconfigured.
    """
    if userdata is None:
        return userdata
    userdata_template = salt.config.get_cloud_config_value(
        'userdata_template', vm_, opts, search_global=False, default=None
    )
    if userdata_template is False:
        # Explicitly disabled for this VM.
        return userdata
    # Fall back to the globally configured engine when the VM profile
    # does not name one itself.
    renderer = opts.get('userdata_template') \
        if userdata_template is None \
        else userdata_template
    if renderer is None:
        return userdata
    else:
        render_opts = opts.copy()
        render_opts.update(vm_)
        rend = salt.loader.render(render_opts, {})
        blacklist = opts['renderer_blacklist']
        whitelist = opts['renderer_whitelist']
        templated = salt.template.compile_template(
            ':string:', rend, renderer, blacklist, whitelist,
            input_data=userdata,
        )
        if not isinstance(templated, six.string_types):
            # Some renderers return a file-like object; coerce to text.
            try:
                templated = ''.join(templated.readlines())
            except AttributeError:
                log.warning(
                    'Templated userdata resulted in non-string result (%s), '
                    'converting to string', templated
                )
                templated = six.text_type(templated)
        return templated
Use the configured templating engine to template the userdata file
def GetRandomDatetime():
    """Return a random datetime within the next week, zeroed to the minute."""
    week_seconds = 60 * 60 * 24 * 7
    offset = timedelta(seconds=random.randint(0, week_seconds))
    moment = datetime.today() + offset
    # Truncate to whole minutes.
    return moment.replace(second=0, microsecond=0)
Return a datetime in the next week.
def rolling_window_sequences(X, index, window_size, target_size, target_column):
    """Create rolling window sequences out of timeseries data.

    Each window of *window_size* rows is paired with the following
    *target_size* values of *target_column*.

    Returns (out_X, out_y, X_index, y_index) as numpy arrays, where the
    index arrays hold the *index* value at each window/target start.
    """
    windows = []
    targets = []
    window_starts = []
    target_starts = []
    target = X[:, target_column]
    n_windows = len(X) - window_size - target_size + 1
    for start in range(n_windows):
        stop = start + window_size
        windows.append(X[start:stop])
        targets.append(target[stop:stop + target_size])
        window_starts.append(index[start])
        target_starts.append(index[stop])
    return (np.asarray(windows), np.asarray(targets),
            np.asarray(window_starts), np.asarray(target_starts))
Create rolling window sequences out of timeseries data.
def evaluate_inline(self, expression, context=None, escape=None, safe_wrapper=None): if context is None: context = {} try: with self._evaluation_context(escape, safe_wrapper): compiled = self._environment.compile_expression(expression) return compiled(**context) except jinja2.TemplateError as error: raise EvaluationError(error.args[0])
Evaluate an inline expression.
def write_tsv(output_stream, *tup, **kwargs):
    """Write *tup* as one tab-separated row to a binary *output_stream*.

    The row is newline-terminated and encoded before writing.

    :param output_stream: stream whose ``write()`` accepts bytes.
    :param tup: string fields of the row.
    :param kwargs: ``encoding`` selects the codec (default ``utf-8``).
    """
    encoding = kwargs.get('encoding') or 'utf-8'
    # join() accepts the tuple directly; the original built a pointless
    # intermediate list copy with [s for s in tup].
    value = '\t'.join(tup) + '\n'
    output_stream.write(value.encode(encoding))
Write argument list in `tup` out as a tab-separated row to the stream.
def powered_up(self): if not self.data.scripts.powered_up: return False for script in self.data.scripts.powered_up: if not script.check(self): return False return True
Returns True whether the card is "powered up".
def message_search(self, text, on_success, peer=None, min_date=None, max_date=None, max_id=None, offset=0, limit=255): raise TWXUnsupportedMethod()
Unsupported in the Bot API
def deploy_token_contract( self, token_supply: int, token_decimals: int, token_name: str, token_symbol: str, token_type: str = 'CustomToken', ): receipt = self.deploy( contract_name=token_type, args=[token_supply, token_decimals, token_name, token_symbol], ) token_address = receipt['contractAddress'] assert token_address and is_address(token_address) token_address = to_checksum_address(token_address) return {token_type: token_address}
Deploy a token contract.
def find_arg(self, name): name = self.normalize_name(name) return self.args.get(name)
Find arg by normalized arg name or parameter name.
def supported(self, tags=None): if tags is None: tags = pep425tags.get_supported() return bool(set(tags).intersection(self.file_tags))
Is this wheel supported on this system?
def add_resource(self, resource): if resource.exists(): self.resources[resource] = self.timekeeper.get_indicator(resource) else: self.resources[resource] = None
Add a resource to the list of interesting resources
def _load_variable(func, program_id, index): n = 64 bufsize = GLsizei(n) length = pointer(GLsizei(0)) size = pointer(GLint(0)) type = pointer(GLenum(0)) uname = create_string_buffer(n) func(program_id, index, bufsize, length, size, type, uname) return size[0], type[0], uname.value.decode('utf8')
Loads the meta data for a uniform or attribute
async def delView(self, iden): if iden == self.iden: raise s_exc.SynErr(mesg='cannot delete the main view') view = self.views.pop(iden, None) if view is None: raise s_exc.NoSuchView(iden=iden) await self.hive.pop(('cortex', 'views', iden)) await view.fini()
Delete a cortex view by iden.
def cyber_observable_check(original_function):
    """Decorator for generator checks that require cyber observable data.

    The wrapped function is skipped entirely when its first argument
    carries no cyber observable data; otherwise its results are yielded
    through.  Unlike the original, the wrapped function is now invoked
    exactly once (the original called it a second time to iterate,
    duplicating any side effects of the first call).
    """
    def new_function(*args, **kwargs):
        if not has_cyber_observable_data(args[0]):
            return
        result = original_function(*args, **kwargs)
        # Non-iterable results (e.g. None from a plain function) yield
        # nothing, matching the original's observable behavior.
        if isinstance(result, Iterable):
            for x in result:
                yield x
    new_function.__name__ = original_function.__name__
    return new_function
Decorator for functions that require cyber observable data.
def set(self, header, value=None):
    """Set one header, or merge a dict of headers when *value* is None."""
    if value is not None:
        self.headers[header] = value
        return
    # *header* is a mapping of header name -> value.
    for name, val in header.items():
        self.headers[name] = val
Set header to the value
def BFS(G, start):
    """Breadth-first search of *G*; return the predecessor map from *start*.

    :raises GraphInsertError: when *start* is not a vertex of *G*.
    """
    if start not in G.vertices:
        raise GraphInsertError("Vertex %s doesn't exist." % (start,))
    # white = unvisited, grey = queued, black = fully processed.
    color = {vertex: 'white' for vertex in G.vertices}
    pred = {vertex: None for vertex in G.vertices}
    dist = {vertex: 0 for vertex in G.vertices}
    queue = Queue()
    queue.put(start)
    while queue.qsize() > 0:
        current = queue.get()
        for neighbor in G.vertices[current]:
            if color[neighbor] == 'white':
                color[neighbor] = 'grey'
                pred[neighbor] = current
                dist[neighbor] = dist[current] + 1
                queue.put(neighbor)
        color[current] = 'black'
    return pred
Algorithm for breadth-first searching the vertices of a graph.
def operations(nsteps):
    """Return the number of operations needed for *nsteps* of GMRES.

    Keys: 'A' (matrix applications), 'M'/'Ml'/'Mr' (preconditioner
    applications), 'ip_B' (inner products), 'axpy' (vector updates).
    """
    # Triangular number: sum of 1..nsteps, from the growing Krylov basis.
    triangular = nsteps * (nsteps + 1) / 2
    return {
        'A': 1 + nsteps,
        'M': 2 + nsteps,
        'Ml': 2 + nsteps,
        'Mr': 1 + nsteps,
        'ip_B': 2 + nsteps + triangular,
        'axpy': 4 + 2 * nsteps + triangular,
    }
Returns the number of operations needed for nsteps of GMRES
def detect_encoding(value):
    """Return the character encoding for a JSON byte string.

    Uses the BOM-less detection heuristic: JSON's first two characters
    are always ASCII, so the pattern of zero bytes among the first four
    bytes identifies the UTF-16/32 variants; anything else is UTF-8.
    """
    # Tuple of "is this byte non-zero?" flags for the first four bytes.
    if six.PY2:
        null_pattern = tuple(bool(ord(char)) for char in value[:4])
    else:
        null_pattern = tuple(bool(char) for char in value[:4])
    encodings = {
        # Zero-byte layouts for big/little-endian UTF-32/16.
        (0, 0, 0, 1): 'utf-32-be',
        (0, 1, 0, 1): 'utf-16-be',
        (1, 0, 0, 0): 'utf-32-le',
        (1, 0, 1, 0): 'utf-16-le',
    }
    return encodings.get(null_pattern, 'utf-8')
Returns the character encoding for a JSON string.
def update(self): "Updates cartesian coordinates for drawing tree graph" self.edges = np.zeros((self.ttree.nnodes - 1, 2), dtype=int) self.verts = np.zeros((self.ttree.nnodes, 2), dtype=float) self.lines = [] self.coords = [] self.update_idxs() self.update_fixed_order() self.assign_vertices() self.assign_coordinates() self.reorient_coordinates()
Updates cartesian coordinates for drawing tree graph
def _fetch_url_data(self, url, username, password, verify, custom_headers): auth = (username, password) url = "%s%s" % (url, STATS_URL) custom_headers.update(headers(self.agentConfig)) self.log.debug("Fetching haproxy stats from url: %s" % url) response = requests.get( url, auth=auth, headers=custom_headers, verify=verify, timeout=self.default_integration_http_timeout ) response.raise_for_status() if PY2: return response.content.splitlines() else: content = response.content decode_fn = getattr(content, "decode", None) if callable(decode_fn): content = content.decode('utf-8') return content.splitlines()
Hit a given http url and return the stats lines
def _exit(self, status_code): exit_func = os._exit if threading.active_count() > 1 else sys.exit exit_func(status_code)
Properly kill Python process including zombie threads.
def build_ml_phyml(alignment, outfile, work_dir=".", **kwargs):
    """Build a maximum-likelihood tree of DNA sequences with PhyML.

    Writes the alignment in relaxed PHYLIP format, runs PhyML on it,
    and copies the resulting tree to *outfile*.

    :returns: (outfile, phy_file) on success, None when PhyML fails.
    """
    phy_file = op.join(work_dir, "work", "aln.phy")
    # NOTE(review): file() is the Python 2 builtin; this row is py2-only.
    AlignIO.write(alignment, file(phy_file, "w"), "phylip-relaxed")
    phyml_cl = PhymlCommandline(cmd=PHYML_BIN("phyml"), input=phy_file, **kwargs)
    logging.debug("Building ML tree using PhyML: %s" % phyml_cl)
    stdout, stderr = phyml_cl()
    # PhyML writes its tree next to the input alignment.
    tree_file = phy_file + "_phyml_tree.txt"
    if not op.exists(tree_file):
        print("***PhyML failed.", file=sys.stderr)
        return None
    sh("cp {0} {1}".format(tree_file, outfile), log=False)
    logging.debug("ML tree printed to %s" % outfile)
    return outfile, phy_file
build maximum likelihood tree of DNA seqs with PhyML
def _parse_from_incar(filename, key):
    """Helper to parse a parameter from an INCAR next to *filename*.

    Scans the directory of *filename* for a file whose name contains
    "INCAR" and returns *key*'s value from it.

    :returns: the INCAR value, or None when no INCAR exists or the
        first matching INCAR lacks *key*.
    """
    dirname = os.path.dirname(filename)
    for f in os.listdir(dirname):
        if re.search(r"INCAR", f):
            warnings.warn("INCAR found. Using " + key + " from INCAR.")
            incar = Incar.from_file(os.path.join(dirname, f))
            if key in incar:
                return incar[key]
            else:
                # Only the first matching INCAR is consulted.
                return None
    return None
Helper function to parse a parameter from the INCAR.
def file_to_list(path): if not os.path.exists(path): ui.error(c.MESSAGES["path_missing"], path) sys.exit(1) with codecs.open(path, "r", "UTF-8") as contents: lines = contents.read().splitlines() return lines
Return the contents of a file as a list when given a path.
def policy_net(rng_key, batch_observations_shape, num_actions, bottom_layers=None): if bottom_layers is None: bottom_layers = [] bottom_layers.extend([layers.Dense(num_actions), layers.LogSoftmax()]) net = layers.Serial(*bottom_layers) return net.initialize(batch_observations_shape, rng_key), net
A policy net function.
def check_version(current_version: str): app_version = parse_version(current_version) while True: try: _do_check_version(app_version) except requests.exceptions.HTTPError as herr: click.secho('Error while checking for version', fg='red') print(herr) except ValueError as verr: click.secho('Error while checking the version', fg='red') print(verr) finally: gevent.sleep(CHECK_VERSION_INTERVAL)
Check periodically for a new release
def attach_related_file(self, path, mimetype=None):
    """Attach a file from the filesystem as a related attachment.

    :param path: filesystem path of the file; its basename becomes the
        attachment filename.
    :param mimetype: optional MIME type forwarded to ``attach_related``.
    """
    filename = os.path.basename(path)
    # Use a context manager so the handle is closed deterministically;
    # the original leaked the open file until garbage collection.
    with open(path, 'rb') as fh:
        content = fh.read()
    self.attach_related(filename, content, mimetype)
Attaches a file from the filesystem.
def encode(self, s): s = tf.compat.as_text(s) tokens = self._tokenizer.tokenize(s) tokens = _prepare_tokens_for_encode(tokens) ids = [] for token in tokens: ids.extend(self._token_to_ids(token)) return text_encoder.pad_incr(ids)
Encodes text into a list of integers.
def parentLayer(self): if self._parentLayer is None: from ..agol.services import FeatureService self.__init() url = os.path.dirname(self._url) self._parentLayer = FeatureService(url=url, securityHandler=self._securityHandler, proxy_url=self._proxy_url, proxy_port=self._proxy_port) return self._parentLayer
returns information about the parent
def trim_tree(self, node):
    """Recursively trim empty data nodes out of the tree.

    Returns 1 when *node* carries no data and has no children (the
    parent should prune it), 0 when it must be kept, and None when it
    still has live children after trimming.
    """
    is_empty = len(node[-1]) == 0
    # Leaf: prunable exactly when its data payload is empty.
    if node[1] == -1 and node[2] == -1:
        return 1 if is_empty else 0
    # Internal node: trim both subtrees, replacing pruned children
    # with the -1 sentinel (assumes children are both -1 or both nodes).
    if self.trim_tree(node[1]) == 1:
        node[1] = -1
    if self.trim_tree(node[2]) == 1:
        node[2] = -1
    if node[1] == -1 and node[2] == -1:
        return 1 if is_empty else 0
trims the tree for any empty data nodes
def package(self, output=None): self.check_venv() self.override_stage_config_setting('delete_local_zip', False) if self.prebuild_script: self.execute_prebuild_script() self.create_package(output) self.callback('zip') size = human_size(os.path.getsize(self.zip_path)) click.echo(click.style("Package created", fg="green", bold=True) + ": " + click.style(self.zip_path, bold=True) + " (" + size + ")")
Only build the package
def parse_id_literal(ast, _variables=None): if isinstance(ast, (StringValueNode, IntValueNode)): return ast.value return INVALID
Parse an ID value node in the AST.
def show_ipsec_site_connection(self, ipsecsite_conn, **_params): return self.get( self.ipsec_site_connection_path % (ipsecsite_conn), params=_params )
Fetches information of a specific IPsecSiteConnection.
def _chk_flds_fmt(nt_fields, prtfmt): fmtflds = get_fmtflds(prtfmt) missing_data = set(fmtflds).difference(set(nt_fields)) if not missing_data: return msg = ['CANNOT PRINT USING: "{PF}"'.format(PF=prtfmt.rstrip())] for fld in fmtflds: errmrk = "" if fld in nt_fields else "ERROR-->" msg.append(" {ERR:8} {FLD}".format(ERR=errmrk, FLD=fld)) raise Exception('\n'.join(msg))
Check that all fields in the prtfmt have corresponding data in the namedtuple.
def onKeyInCommandEntry(self, event): if event.char == '\r': self.onSendCommand() self.canvas.focus_set()
Called when a key is pressed when the command entry box has focus.
def wildcards_overlap(name1, name2): if not name1 and not name2: return True if not name1 or not name2: return False for matched1, matched2 in _character_matches(name1, name2): if wildcards_overlap(name1[matched1:], name2[matched2:]): return True return False
Return true if two wildcard patterns can match the same string.
def check_running_job_count(): try: job_list = current_k8s_batchv1_api_client.\ list_job_for_all_namespaces() if len(job_list.items) > K8S_MAXIMUM_CONCURRENT_JOBS: return False except ApiException as e: log.error('Something went wrong while getting running job list.') log.error(e) return False return True
Check upper limit on running jobs.
def _validate(self): errors = {} for name, validator in self._validators.items(): value = getattr(self, name) try: validator(self, value) except ValidationError as e: errors[name] = str(e) self._validate_errors = errors
Validate model data and save errors
def K2onSilicon_main(args=None): import argparse parser = argparse.ArgumentParser( description="Run K2onSilicon to find which targets in a " "list call on active silicon for a given K2 campaign.") parser.add_argument('csv_file', type=str, help="Name of input csv file with targets, column are " "Ra_degrees, Dec_degrees, Kepmag") parser.add_argument('campaign', type=int, help='K2 Campaign number') args = parser.parse_args(args) K2onSilicon(args.csv_file, args.campaign)
Function called when `K2onSilicon` is executed on the command line.
def lower_unsupported_metafield_expressions(ir_blocks): def visitor_fn(expression): if not isinstance(expression, expressions.LocalField): return expression if expression.field_name not in constants.UNSUPPORTED_META_FIELDS: return expression raise NotImplementedError( u'Encountered unsupported metafield {} in LocalField {} during construction of ' u'SQL query tree for IR blocks {}.'.format( constants.UNSUPPORTED_META_FIELDS[expression.field_name], expression, ir_blocks)) new_ir_blocks = [ block.visit_and_update_expressions(visitor_fn) for block in ir_blocks ] return new_ir_blocks
Raise exception if an unsupported metafield is encountered in any LocalField expression.
def upix_to_pix(upix):
    """Split a unique (nested-scheme) pixel number into (pix, nside).

    Inverts upix = 4 * nside**2 + pix, where nside is a power of two.
    """
    exponent = np.floor(np.log2(upix / 4)) / 2
    nside = np.power(2, exponent).astype(int)
    pix = upix - 4 * np.power(nside, 2)
    return pix, nside
Split a unique pixel number into its pixel index and nside.
def register_library_type(name, type_): if name in LIBRARY_TYPES: raise ArcticException("Library %s already registered as %s" % (name, LIBRARY_TYPES[name])) LIBRARY_TYPES[name] = type_
Register a Arctic Library Type handler
def groups_pools_getGroups(): method = 'flickr.groups.pools.getGroups' data = _doget(method, auth=True) groups = [] if isinstance(data.rsp.groups.group, list): for group in data.rsp.groups.group: groups.append(Group(group.id, name=group.name, \ privacy=group.privacy)) else: group = data.rsp.groups.group groups = [Group(group.id, name=group.name, privacy=group.privacy)] return groups
Get a list of groups the auth'd user can post photos to.
def ssh_get_info(node):
    """Determine the ssh user and ssh key option for *node*.

    Supports aws, azure, gcp and alicloud clouds.

    :param node: libcloud-style node with ``cloud`` and ``extra`` attrs.
    :returns: tuple ``(ssh_user, ssh_key)``; ``ssh_key`` is a
        ``-i <pem> `` option string for aws and empty otherwise.
    """
    # Initialize both so unknown clouds return ("", "") instead of
    # raising UnboundLocalError as the original did.
    ssh_key = ""
    ssh_user = ""
    if node.cloud == "aws":
        raw_key = node.extra['key_name']
        ssh_key = "-i {0}{1}.pem ".format(CONFIG_DIR, raw_key)
        ssh_user = ssh_calc_aws(node)
    elif node.cloud == "azure":
        ssh_user = node.extra['properties']['osProfile']['adminUsername']
    elif node.cloud == "gcp":
        items = node.extra['metadata'].get('items', [{}])
        # Find the metadata entry holding the ssh keys.  The original
        # indexed items['key' == 'ssh-keys'], i.e. items[False] ->
        # items[0], which only works by accident when the ssh-keys
        # entry happens to come first.
        entry = next((i for i in items if i.get('key') == 'ssh-keys'), {})
        keyname = entry.get('value', "")
        pos = keyname.find(":")
        ssh_user = keyname[0:pos]
    return ssh_user, ssh_key
Determine ssh-user and ssh-key for node.
def getHourTable(date, pos): table = hourTable(date, pos) return HourTable(table, date)
Returns an HourTable object.
def _get_config(config_file):
    """Find, read and parse the configuration file.

    :param config_file: path to the INI configuration file.
    :returns: the populated parser, or None when the file does not exist.
    :raises ConfigError: when the file cannot be read or parsed.
    """
    parser = ConfigParser.SafeConfigParser()
    if os.path.lexists(config_file):
        try:
            log.info('Reading config: %s', config_file)
            # Close the handle deterministically; the original leaked it.
            with open(config_file) as inp:
                parser.readfp(inp)
            return parser
        # 'as err' works on Python 2.6+ and Python 3; the original
        # 'except (...), err' form is a SyntaxError on Python 3.
        except (IOError, ConfigParser.ParsingError) as err:
            raise ConfigError("Failed to read configuration %s\n%s"
                              % (config_file, err))
    return None
find, read and parse configuraton.
def publish_changes(self, etype, echid): _LOGGING.debug('%s Update: %s, %s', self.name, etype, self.fetch_attributes(etype, echid)) signal = 'ValueChanged.{}'.format(self.cam_id) sender = '{}.{}'.format(etype, echid) if dispatcher: dispatcher.send(signal=signal, sender=sender) self._do_update_callback('{}.{}.{}'.format(self.cam_id, etype, echid))
Post updates for specified event type.
def hide(self): thr_is_alive = self._spin_thread and self._spin_thread.is_alive() if thr_is_alive and not self._hide_spin.is_set(): self._hide_spin.set() sys.stdout.write("\r") self._clear_line() sys.stdout.flush()
Hide the spinner to allow for custom writing to the terminal.
def GetLastKey(self, voice=1):
    """Return the most recent musical key (key signature, not an index).

    Searches backwards through the given voice for a KeyNode; falls
    back to this object's own ``key`` attribute when no node is found
    or the voice does not exist.

    :param voice: voice index to search (default 1).
    :returns: the key node, the fallback ``self.key``, or None.
    """
    voice_obj = self.GetChild(voice)
    if voice_obj is not None:
        key = BackwardSearch(KeyNode, voice_obj, 1)
        if key is not None:
            return key
    # Single fallback replaces the two identical hasattr branches the
    # original duplicated in both else arms.
    if hasattr(self, "key"):
        return self.key
    return None
key as in musical key, not index