code
stringlengths
51
2.34k
docstring
stringlengths
11
171
def get(relation: str) -> 'Predicate':
    """Return the Predicate enum instance corresponding to a relation string.

    Matches *relation* case-insensitively against each member's fortran,
    wql, and math spellings; returns None when nothing matches.
    NOTE(review): only the wql spelling is upper-cased before comparison,
    so fortran/math spellings are presumably stored upper-case — confirm.
    """
    for pred in Predicate:
        if relation.upper() in (pred.value.fortran, pred.value.wql.upper(), pred.value.math):
            return pred
    return None
Return enum instance corresponding to input relation string
def prepend_urls(self): return [ url(r"^(?P<resource_name>%s)/(?P<pk>\w[\w/-]*)/generate%s$" % (self._meta.resource_name, trailing_slash()), self.wrap_view('generate'), name="api_tileset_generate"), url(r"^(?P<resource_name>%s)/(?P<pk>\w[\w/-]*)/download%s$" % (self._meta.resource_name, trailing_slash()), self.wrap_view('download'), name="api_tileset_download"), url(r"^(?P<resource_name>%s)/(?P<pk>\w[\w/-]*)/status%s$" % (self._meta.resource_name, trailing_slash()), self.wrap_view('status'), name="api_tileset_status"), url(r"^(?P<resource_name>%s)/(?P<pk>\w[\w/-]*)/stop%s$" % (self._meta.resource_name, trailing_slash()), self.wrap_view('stop'), name="api_tileset_stop"), ]
Add the following array of urls to the Tileset base urls
def read_string(self):
    """Read and return a length-delimited string.

    The varint length prefix is consumed first, then that many bytes are
    read from the underlying stream as the string payload.
    """
    payload_length = self._stream.read_var_uint32()
    return self._stream.read_string(payload_length)
Reads and returns a length-delimited string.
def find_by_tags(and_tags=None, or_tags=None, _connection=None, page_size=100,
                 page_number=0, sort_by=enums.DEFAULT_SORT_BY,
                 sort_order=enums.DEFAULT_SORT_ORDER):
    """List videos given a certain set of tags.

    At least one of and_tags (all must match) or or_tags (any may match)
    is required, and each must be a tuple or list.
    Raises exceptions.PyBrightcoveError on invalid arguments.
    """
    err = None
    if not and_tags and not or_tags:
        err = "You must supply at least one of either and_tags or or_tags."
    if and_tags and not isinstance(and_tags, (tuple, list)):
        # Bug fix: message previously read "must an iterable".
        err = "The and_tags argument for Video.find_by_tags must be an "
        err += "iterable"
    if or_tags and not isinstance(or_tags, (tuple, list)):
        # Bug fix: message previously read "must an iterable".
        err = "The or_tags argument for Video.find_by_tags must be an "
        err += "iterable"
    if err:
        raise exceptions.PyBrightcoveError(err)
    atags = None
    otags = None
    if and_tags:
        atags = ','.join([str(t) for t in and_tags])
    if or_tags:
        otags = ','.join([str(t) for t in or_tags])
    return connection.ItemResultSet('find_videos_by_tags', Video, _connection,
        page_size, page_number, sort_by, sort_order,
        and_tags=atags, or_tags=otags)
List videos given a certain set of tags.
def list_records_for_build_config_set(id, page_size=200, page_index=0, sort="", q=""): data = list_records_for_build_config_set_raw(id, page_size, page_index, sort, q) if data: return utils.format_json_list(data)
Get a list of BuildRecords for the given BuildConfigSetRecord
def package_version(package_name: str) -> typing.Optional[str]: try: return pkg_resources.get_distribution(package_name).version except (pkg_resources.DistributionNotFound, AttributeError): return None
Returns package version as a string, or None if it couldn't be found.
def import_script(self, script_name): filename = os.path.abspath(script_name) with open(filename, 'r') as script_file: self.toolbox_file.write(script_file.read())
Finds the script file and copies it into the toolbox
def init(options, use_sigterm_handler=True):
    """Must be called just after registration, before anything else.

    Merges *options* (a dict or an object exposing option attributes) over
    DEFAULT_OPTIONS into the module-level _OPTIONS, optionally registers
    test endpoints, loads HTTP basic-auth credentials, runs each registered
    command's safe_init hook, and installs termination signal handlers.
    NOTE(review): uses dict.iteritems, so this code targets Python 2.
    """
    global _AUTH, _OPTIONS
    if isinstance(options, dict):
        # Dict input: overlay the user options on the defaults.
        _OPTIONS = DEFAULT_OPTIONS.copy()
        _OPTIONS.update(options)
    else:
        # Object input: pull each known option attribute, falling back to
        # the default value when the attribute is absent.
        for optname, optvalue in DEFAULT_OPTIONS.iteritems():
            if hasattr(options, optname):
                _OPTIONS[optname] = getattr(options, optname)
            else:
                _OPTIONS[optname] = optvalue
    if _OPTIONS['testmethods']:
        # Optional self-test endpoints for GET and POST.
        def fortytwo(request):
            "test GET method"
            return 42
        def ping(request):
            "test POST method"
            return request.payload_params()
        register(fortytwo, 'GET')
        register(ping, 'POST')
    if _OPTIONS['auth_basic_file']:
        # Load basic-auth credentials from the configured htpasswd-style file.
        _AUTH = HttpAuthentication(_OPTIONS['auth_basic_file'],
                                   realm = _OPTIONS['auth_basic']).parse_file()
    for name, cmd in _COMMANDS.iteritems():
        if cmd.safe_init:
            LOG.info("safe_init: %r", name)
            cmd.safe_init(_OPTIONS)
    if use_sigterm_handler:
        # Allow graceful shutdown on SIGTERM/SIGINT.
        signal.signal(signal.SIGTERM, sigterm_handler)
        signal.signal(signal.SIGINT, sigterm_handler)
Must be called just after registration, before anything else
def replicaStatus(self, url): params = {"f" : "json"} url = url + "/status" return self._get(url=url, param_dict=params, securityHandler=self._securityHandler, proxy_port=self._proxy_port, proxy_url=self._proxy_url)
Gets the replica status when the export's async parameter was set to True.
def _instantiate_session(self, method_name, proxy=None, *args, **kwargs): if 'manager' in kwargs: session_class = getattr(kwargs['manager'], method_name) del kwargs['manager'] else: session_class = getattr(self._provider_manager, method_name) if proxy is None: try: return session_class(bank_id=self._catalog_id, *args, **kwargs) except AttributeError: return session_class(*args, **kwargs) else: try: return session_class(bank_id=self._catalog_id, proxy=proxy, *args, **kwargs) except AttributeError: return session_class(proxy=proxy, *args, **kwargs)
Instantiates a provider session
def withSize(cls, minimum, maximum): class X(cls): subtypeSpec = cls.subtypeSpec + constraint.ValueSizeConstraint( minimum, maximum) X.__name__ = cls.__name__ return X
Creates a subclass with value size constraint.
def extract_options(options, names):
    """Return the options matching *names* and remove them from the given
    mapping in-place."""
    extracted = {}
    for key in list(options):
        if key in names:
            extracted[key] = options.pop(key)
    return extracted
Return options for names and remove it from given options in-place.
def run(bam_file, data, out_dir):
    """Create several log files and return the aggregated stats.

    Bug fix: the aggregated dict ``m`` was built but never returned,
    so callers always received None.
    """
    m = {"base": None, "secondary": []}
    m.update(_mirbase_stats(data, out_dir))
    m["secondary"].append(_seqcluster_stats(data, out_dir))
    return m
Create several log files
def modified_created(instance): if 'modified' in instance and 'created' in instance and \ instance['modified'] < instance['created']: msg = "'modified' (%s) must be later or equal to 'created' (%s)" return JSONError(msg % (instance['modified'], instance['created']), instance['id'])
`modified` property must be later or equal to `created` property
def free(**kwargs): output, err = cli_syncthing_adapter.free(kwargs['path']) click.echo("%s" % output, err=err)
Stop synchronization of directory.
def expected_values(early_mean=early_mean, late_mean=late_mean, switchpoint=switchpoint): n = len(disasters_array) return concatenate( (ones(switchpoint) * early_mean, ones(n - switchpoint) * late_mean))
Discrepancy measure for GOF using the Freeman-Tukey statistic
def annotated_cfg(self, start_point=None):
    """Returns an AnnotatedCFG based on slicing result.

    Fixes: ``stmt_idx is not -1`` relied on CPython small-int caching and
    is replaced with ``!=``; an unused ``targets`` list comprehension loop
    was removed as dead code.
    """
    l.debug("Initializing AnnoCFG...")
    anno_cfg = AnnotatedCFG(self.project, self._cfg)
    # Mark the last statement of each slicing target (-1 means "whole run").
    for simrun, stmt_idx in self._targets:
        if stmt_idx != -1:
            anno_cfg.set_last_statement(simrun.addr, stmt_idx)
    # Whitelist chosen blocks / individual statements.
    for n in self._cfg.graph.nodes():
        if n.addr in self.chosen_statements:
            if self.chosen_statements[n.addr] is True:
                # True means the whole block belongs to the slice.
                anno_cfg.add_block_to_whitelist(n.addr)
            else:
                anno_cfg.add_statements_to_whitelist(n.addr, self.chosen_statements[n.addr])
    # Whitelist edges whose endpoints are both inside the slice.
    for src, dst in self._cfg.graph.edges():
        if dst.addr in self.chosen_statements and src.addr in self.chosen_statements:
            anno_cfg.add_exit_to_whitelist(src.addr, dst.addr)
    return anno_cfg
Returns an AnnotatedCFG based on slicing result.
def nom_diam_pipe(self): ID = pc.diam_circle(self.area_pipe_min) return pipe.ND_SDR_available(ID, self.sdr)
The nominal diameter of the LFOM pipe
def Mean(self):
    """Return the arithmetic mean of all non-None values, or None if empty.

    Bug fix: the original used floor division (//), silently truncating
    the mean to an integer; a true mean uses real division.
    self.data is assumed to be an iterable of (value, weight-or-tag) pairs
    where only the first element matters here.
    """
    values = [v for v, _ in self.data if v is not None]
    if not values:
        return None
    return sum(values) / len(values)
Return the arithmetic mean of all values.
def git_root(self):
    """Find the root git folder by walking up from self.parent_dir.

    The result is cached on self._git_folder after the first call.
    Raises HarpoonError if no .git directory exists between parent_dir
    and the filesystem root. NOTE(review): the stop condition compares
    against '/', so this assumes a POSIX path layout.
    """
    if not getattr(self, "_git_folder", None):
        root_folder = os.path.abspath(self.parent_dir)
        # Walk upwards until a directory containing .git is found.
        while not os.path.exists(os.path.join(root_folder, '.git')):
            if root_folder == '/':
                raise HarpoonError("Couldn't find a .git folder", start_at=self.parent_dir)
            root_folder = os.path.dirname(root_folder)
        self._git_folder = root_folder
    return self._git_folder
Find the root git folder
def end(self) -> "GameNode":
    """Follow the main variation (index 0 at every node) to its end and
    return the final node."""
    current = self
    while current.variations:
        current = current.variations[0]
    return current
Follows the main variation to the end and returns the last node.
def pretty_print_model(devicemodel):
    """Prints out a device model in the terminal by parsing dict.

    Bug fix: the original line read
    ``PRETTY_PRINT_MODEL = logging.info(PRETTY_PRINT_MODEL % devicemodel)``,
    which references an undefined name (NameError at runtime) and rebinds
    it to logging.info's None return value. It is replaced with a plain
    log of the model; the intended format string is unrecoverable here.
    """
    logging.info('Device model: %s', devicemodel)
    if 'traits' in devicemodel:
        for trait in devicemodel['traits']:
            logging.info(' Trait %s' % trait)
    else:
        logging.info('No traits')
    logging.info('')
Prints out a device model in the terminal by parsing dict.
def create_identity(self, user, sp_mapping, **extra_config):
    """Generate an identity dictionary for *user* from the SP's mapping of
    user attribute name -> output attribute name. Attributes the user
    object lacks are skipped."""
    identity = {}
    for user_attr, out_attr in sp_mapping.items():
        if hasattr(user, user_attr):
            identity[out_attr] = getattr(user, user_attr)
    return identity
Generate an identity dictionary of the user based on the given mapping of desired user attributes by the SP
def MatrixDiagPart(a): r = np.zeros(a.shape[:-2] + (min(a.shape[-2:]),)) for coord in np.ndindex(a.shape[:-2]): pos = coord + (Ellipsis,) r[pos] = np.diagonal(a[pos]) return r,
Batched diag op that returns only the diagonal elements.
def configure_proxy(self, curl_object):
    """Configure pycurl SOCKS5 proxy settings on *curl_object*.

    Bug fix: PROXYUSERPWD was formatted from (user, port) instead of
    (user, password), producing credentials like "alice:1080".
    """
    curl_object.setopt(curl_object.PROXY, self._proxy_hostname)
    curl_object.setopt(curl_object.PROXYPORT, self._proxy_port)
    curl_object.setopt(curl_object.PROXYTYPE, curl_object.PROXYTYPE_SOCKS5)
    if self._proxy_user and self._proxy_passwd:
        curl_object.setopt(curl_object.PROXYUSERPWD,
                           '%s:%s' % (self._proxy_user, self._proxy_passwd))
configure pycurl proxy settings
def handler_for(obj): for handler_type in handlers: if isinstance(obj, handler_type): return handlers[handler_type] try: for handler_type in handlers: if issubclass(obj, handler_type): return handlers[handler_type] except TypeError: pass
return the handler for the object type
def update_bounds(self, bounds): starts = bounds[:,0,:] ends = bounds[:,1,:] self.bounds = bounds self.lengths = np.sqrt(((ends - starts)**2).sum(axis=1)) vertices, normals, colors = self._process_reference() self.tr.update_vertices(vertices) self.tr.update_normals(normals)
Update cylinders start and end positions
def _check_consistent_units(self): if isinstance(self._pot,list): if self._roSet and self._pot[0]._roSet: assert m.fabs(self._ro-self._pot[0]._ro) < 10.**-10., 'Physical conversion for the actionAngle object is not consistent with that of the Potential given to it' if self._voSet and self._pot[0]._voSet: assert m.fabs(self._vo-self._pot[0]._vo) < 10.**-10., 'Physical conversion for the actionAngle object is not consistent with that of the Potential given to it' else: if self._roSet and self._pot._roSet: assert m.fabs(self._ro-self._pot._ro) < 10.**-10., 'Physical conversion for the actionAngle object is not consistent with that of the Potential given to it' if self._voSet and self._pot._voSet: assert m.fabs(self._vo-self._pot._vo) < 10.**-10., 'Physical conversion for the actionAngle object is not consistent with that of the Potential given to it' return None
Internal function to check that the set of units for this object is consistent with that for the potential
def mkdir_p(path):
    """Create a directory (including parents); silently succeed when it
    already exists, re-raise any other OS error."""
    try:
        os.makedirs(path)
    except EnvironmentError as exc:
        if exc.errno == errno.EEXIST:
            return
        raise
Create a new directory; ignore if it already exists.
def roles_dict(path, repo_prefix="", repo_sub_dir=""): exit_if_path_not_found(path) aggregated_roles = {} roles = os.walk(path).next()[1] for role in roles: for sub_role in roles_dict(path + "/" + role, repo_prefix="", repo_sub_dir=role + "/"): aggregated_roles[role + "/" + sub_role] = role + "/" + sub_role for role in roles: if is_role(os.path.join(path, role)): if isinstance(role, basestring): role_repo = "{0}{1}".format(repo_prefix, role_name(role)) aggregated_roles[role] = role_repo return aggregated_roles
Return a dict of role names and repo paths.
def register_sub(self, o): if o.subopt in self.subopt_map: raise OptionConflictError( "conflicting suboption handlers for `%s'" % o.subopt, o) self.subopt_map[o.subopt] = o
Register the given argument as a suboption of `self`.
def push_byte(self, stack_pointer, byte): stack_pointer.decrement(1) addr = stack_pointer.value self.memory.write_byte(addr, byte)
pushed a byte onto stack
def mount_path(source, target, bind=False): cmd = ['mount'] if bind: cmd.append('--bind') cmd.append(source) cmd.append(target) r = util.subp(cmd) if r.return_code != 0: raise MountError('Could not mount docker container:\n' + ' '.join(cmd) + '\n%s' % r.stderr.decode(sys.getdefaultencoding()))
Subprocess call to mount dev at path.
def alter(self, operation, timeout=None, metadata=None, credentials=None): new_metadata = self.add_login_metadata(metadata) try: return self.any_client().alter(operation, timeout=timeout, metadata=new_metadata, credentials=credentials) except Exception as error: if util.is_jwt_expired(error): self.retry_login() new_metadata = self.add_login_metadata(metadata) return self.any_client().alter(operation, timeout=timeout, metadata=new_metadata, credentials=credentials) else: raise error
Runs a modification via this client.
def get(self, feature):
    """Returns all values of 'feature'.

    Accepts a Feature instance, a feature name, or (for backward
    compatibility) a one-element list of either. The feature -> values
    map is built lazily on first use.
    Idiom fix: ``type(feature) == type([])`` replaced with isinstance.
    """
    if isinstance(feature, list):
        feature = feature[0]
    if not isinstance(feature, b2.build.feature.Feature):
        feature = b2.build.feature.get(feature)
    assert isinstance(feature, b2.build.feature.Feature)
    if self.feature_map_ is None:
        # Group all property values by their feature, once.
        self.feature_map_ = {}
        for v in self.all_:
            self.feature_map_.setdefault(v.feature, []).append(v.value)
    return self.feature_map_.get(feature, [])
Returns all values of 'feature'.
def reset(self): logger.debug('StackInABox({0}): Resetting...' .format(self.__id)) for k, v in six.iteritems(self.services): matcher, service = v logger.debug('StackInABox({0}): Resetting Service {1}' .format(self.__id, service.name)) service.reset() self.services = {} self.holds = {} logger.debug('StackInABox({0}): Reset Complete' .format(self.__id))
Reset StackInABox to a like-new state.
def convert_camel_case_keys(original_dict: Dict[str, Any]) -> Dict[str, Any]: new_dict = dict() for key, val in original_dict.items(): if isinstance(val, dict): new_dict[convert_camel_case_string(key)] = convert_camel_case_keys(val) else: new_dict[convert_camel_case_string(key)] = val return new_dict
Converts all keys of a dict from camel case to snake case, recursively
def _BuildPluginRequest(self, app_id, challenge_data, origin): client_data_map = {} encoded_challenges = [] app_id_hash_encoded = self._Base64Encode(self._SHA256(app_id)) for challenge_item in challenge_data: key = challenge_item['key'] key_handle_encoded = self._Base64Encode(key.key_handle) raw_challenge = challenge_item['challenge'] client_data_json = model.ClientData( model.ClientData.TYP_AUTHENTICATION, raw_challenge, origin).GetJson() challenge_hash_encoded = self._Base64Encode( self._SHA256(client_data_json)) encoded_challenges.append({ 'appIdHash': app_id_hash_encoded, 'challengeHash': challenge_hash_encoded, 'keyHandle': key_handle_encoded, 'version': key.version, }) key_challenge_pair = (key_handle_encoded, challenge_hash_encoded) client_data_map[key_challenge_pair] = client_data_json signing_request = { 'type': 'sign_helper_request', 'signData': encoded_challenges, 'timeoutSeconds': U2F_SIGNATURE_TIMEOUT_SECONDS, 'localAlways': True } return client_data_map, json.dumps(signing_request)
Builds a JSON request in the form that the plugin expects.
def sub(self, path): if is_collection(path): path = path_join(path) return AppDir(path_join(self.path, path))
Returns AppDir instance for given subdirectory name.
def vcf_to_df(canvasvcfs, exonbed, cpus): df = pd.DataFrame() p = Pool(processes=cpus) results = [] args = [(x, exonbed, i) for (i, x) in enumerate(canvasvcfs)] r = p.map_async(vcf_to_df_worker, args, callback=results.append) r.wait() for res in results: df = df.append(res, ignore_index=True) return df
Compile a number of vcf files into tsv file for easy manipulation
def init_search(self): if self.verbose: logger.info("Initializing search.") for generator in self.generators: graph = generator(self.n_classes, self.input_shape).generate( self.default_model_len, self.default_model_width ) model_id = self.model_count self.model_count += 1 self.training_queue.append((graph, -1, model_id)) self.descriptors.append(graph.extract_descriptor()) if self.verbose: logger.info("Initialization finished.")
Call the generators to generate the initial architectures for the search.
def return_rri(self, begsam, endsam): interval = endsam - begsam dat = empty(interval) k = 0 with open(self.filename, 'rt') as f: [next(f) for x in range(12)] for j, datum in enumerate(f): if begsam <= j < endsam: dat[k] = float64(datum[:datum.index('\t')]) k += 1 if k == interval: break return dat
Return raw, irregularly-timed RRI.
def estimate_params(self,burn=None,clip=10.0,alpha=0.32): mle = self.get_mle() out = odict() for param in mle.keys(): out[param] = self.estimate(param,burn=burn,clip=clip,alpha=alpha) return out
Estimate all source parameters
def id(self): if not self._id: self._id = tuple(sorted(map(str, self))) return self._id
A unique, stable, hashable id over the set of pinned artifacts.
def repo_name(msg):
    """Compat util to get the repo name from a message.

    Newer messages carry a commit 'path' (e.g. '/srv/git/<project>.git/...');
    older ones carry the name directly under 'repo'.
    """
    commit = msg['msg']['commit']
    if 'path' in commit:
        # Strip the 9-character path prefix and everything from '.git' on.
        return commit['path'].split('.git')[0][9:]
    return commit['repo']
Compat util to get the repo name from a message.
def convertToMapPic(byteString, mapWidth):
    """Convert a bytestring into a list of rows, each row being the
    concatenated decimal ordinals of mapWidth characters (e.g. an existing
    map of fog-of-war, creep, etc.). A trailing partial row is dropped."""
    rows = []
    row = ""
    for position, char in enumerate(byteString, start=1):
        row += str(ord(char))
        if position % mapWidth == 0:
            rows.append(row)
            row = ""
    return rows
convert a bytestring into a 2D row x column array, representing an existing map of fog-of-war, creep, etc.
async def connect_to_endpoints(self, *endpoints: ConnectionConfig) -> None: self._throw_if_already_connected(*endpoints) await asyncio.gather( *(self._await_connect_to_endpoint(endpoint) for endpoint in endpoints), loop=self.event_loop )
Connect to the given endpoints and await until all connections are established.
def start(): from . import app root, apiopts, conf = app.get_app(__opts__) if not apiopts.get('disable_ssl', False): if 'ssl_crt' not in apiopts or 'ssl_key' not in apiopts: logger.error("Not starting '%s'. Options 'ssl_crt' and " "'ssl_key' are required if SSL is not disabled.", __name__) return None verify_certs(apiopts['ssl_crt'], apiopts['ssl_key']) cherrypy.server.ssl_module = 'builtin' cherrypy.server.ssl_certificate = apiopts['ssl_crt'] cherrypy.server.ssl_private_key = apiopts['ssl_key'] if 'ssl_chain' in apiopts.keys(): cherrypy.server.ssl_certificate_chain = apiopts['ssl_chain'] cherrypy.quickstart(root, apiopts.get('root_prefix', '/'), conf)
Start the server loop
def _notify_change(self): d = self.declaration self._notify_count -= 1 if self._notify_count == 0: self.adapter.notifyDataSetChanged(now=True) self.get_context().timed_call( 500, self._queue_pending_calls)
After all changes have settled, tell Java it changed
def delete_expired(self, expires): meta = self.model._meta with commit_on_success(): self.get_all_expired(expires).update(hidden=True) cursor = self.connection_for_write().cursor() cursor.execute( 'DELETE FROM {0.db_table} WHERE hidden=%s'.format(meta), (True, ), )
Delete all expired taskset results.
def do_terminateInstance(self, args):
    """Terminate an EC2 instance, addressed by index into the scaling
    group's instance list or directly by instance id.

    Bug fix: when a non-numeric id was given, the original left
    ``instanceId`` as a string and then indexed it with ['InstanceId'],
    raising TypeError. The id is now resolved to a plain string in both
    paths before the API call.
    """
    parser = CommandArgumentParser("terminateInstance")
    parser.add_argument(dest='instance', help='instance index or name')
    args = vars(parser.parse_args(args))
    instance = args['instance']
    try:
        # A numeric argument is an index into the scaling group's instances.
        index = int(instance)
        instances = self.scalingGroupDescription['AutoScalingGroups'][0]['Instances']
        instanceId = instances[index]['InstanceId']
    except ValueError:
        # Non-numeric: the argument is already an instance id.
        instanceId = instance
    client = AwsConnectionFactory.getEc2Client()
    client.terminate_instances(InstanceIds=[instanceId])
    self.do_printInstances("-r")
Terminate an EC2 instance
def delete_user(self, id): self.assert_has_permission('scim.write') uri = self.uri + '/Users/%s' % id headers = self._get_headers() logging.debug("URI=" + str(uri)) logging.debug("HEADERS=" + str(headers)) response = self.session.delete(uri, headers=headers) logging.debug("STATUS=" + str(response.status_code)) if response.status_code == 200: return response else: logging.error(response.content) response.raise_for_status()
Delete user with given id.
def geometric_center(coords, periodic): max_vals = periodic theta = 2 * np.pi * (coords / max_vals) eps = np.cos(theta) * max_vals / (2 * np.pi) zeta = np.sin(theta) * max_vals / (2 * np.pi) eps_avg = eps.sum(axis=0) zeta_avg = zeta.sum(axis=0) theta_avg = np.arctan2(-zeta_avg, -eps_avg) + np.pi return theta_avg * max_vals / (2 * np.pi)
Geometric center taking into account periodic boundaries
def _export_to2marc(self, key, value): def _is_for_cds(value): return 'CDS' in value def _is_for_hal(value): return 'HAL' in value and value['HAL'] def _is_not_for_hal(value): return 'HAL' in value and not value['HAL'] result = [] if _is_for_cds(value): result.append({'c': 'CDS'}) if _is_for_hal(value): result.append({'c': 'HAL'}) elif _is_not_for_hal(value): result.append({'c': 'not HAL'}) return result
Populate the ``595`` MARC field.
def _maybe_add_noise_to_payload(self, payload): if self.measurement_noise is not None: payload["measurement-noise"] = self.measurement_noise if self.gate_noise is not None: payload["gate-noise"] = self.gate_noise
Set the gate noise and measurement noise of a payload.
def send(remote_host=None): my_data = get() if not remote_host: remote_host = nago.extensions.settings.get('server') remote_node = nago.core.get_node(remote_host) remote_node.send_command('checkresults', 'post', **my_data) return "checkresults sent to %s" % remote_host
Send local nagios data to a remote nago instance
def update_note(note_id: NoteId, body: Body=None, done: Done=None) -> Note: if note_id != 1: raise NotFoundError('Note does not exist') new_note = note.copy() if body is not None: new_note['body'] = body if done is not None: new_note['done'] = done return new_note
Update an existing note.
def read_files(project, ext): project_path = os.path.join(os.path.dirname(__file__), project) file_list = os.listdir(project_path) flist = [] flist_path = [] for f in file_list: f_path = os.path.join(project_path, f) if os.path.isfile(f_path) and f.endswith(ext) and f != "__init__.py": flist.append(f.split('.')[0]) flist_path.append(f_path) return flist, flist_path
Reads files inside the input project directory.
def send(self, data): self.logger.debug('Send data: {}'.format(data)) if not self.connected: self.logger.warning('Connection not established. Return...') return self.websocket.send(json.dumps(data))
Sends data to the server.
def _validate_dtype(self, dtype): if dtype is not None: dtype = pandas_dtype(dtype) if dtype.kind == 'V': raise NotImplementedError("compound dtypes are not implemented" " in the {0} constructor" .format(self.__class__.__name__)) return dtype
validate the passed dtype
def _create_output_from_match(self, match_result): full_path = match_result['full_path'] path = self._get_relative_path(full_path) return LinterOutput(self.name, path, match_result['msg'])
As isort outputs full path, we change it to relative path.
def ip_to_int(ip):
    """Converts a dotted-quad IP address string to its integer value."""
    result = 0
    for part in ip.split('.'):
        result = (result << 8) + int(part)
    return result
Converts an IP address to an integer
def build_post_policy(self, expiration_time, conditions): assert type(expiration_time) == time.struct_time, \ 'Policy document must include a valid expiration Time object' return '{"expiration": "%s",\n"conditions": [%s]}' % \ (time.strftime(boto.utils.ISO8601, expiration_time), ",".join(conditions))
Taken from the AWS book Python examples and modified for use with boto
def camelcase_search_options(self, options): new_options = {} for key in options: value = options[key] new_key = SEARCH_OPTIONS_DICT.get(key, key) if new_key == 'sort': value = SORT_OPTIONS_DICT.get(value, value) elif new_key == 'timePivot': value = TIME_PIVOT_OPTIONS_DICT.get(value, value) elif new_key in BOOLEAN_SEARCH_OPTIONS: value = str(value).lower() new_options[new_key] = value return new_options
change all underscored variants back to what the API is expecting
def update_refund(self, refund_id, refund_deets): request = self._put('transactions/refunds/' + str(refund_id), refund_deets) return self.responder(request)
Updates an existing refund transaction.
def payload(self): if self.args_rdf_name: result_cls = self.classes.get(self.args_rdf_name, rdfvalue.RDFString) result = result_cls.FromSerializedString( self.Get("args"), age=self.args_age) return result
The payload property automatically decodes the encapsulated data.
def _get_private_key(cls, private_key_path, private_key_passphrase): if private_key_path is None or private_key_passphrase is None: return None with open(private_key_path, 'rb') as key: p_key = serialization.load_pem_private_key( key.read(), password=private_key_passphrase.encode(), backend=default_backend()) return p_key.private_bytes( encoding=serialization.Encoding.DER, format=serialization.PrivateFormat.PKCS8, encryption_algorithm=serialization.NoEncryption())
Get Snowflake private key by path or None.
def get(self): self.loaded = True if not hasattr(self.manager, "get"): return if not self.get_details: return new = self.manager.get(self) if new: self._add_details(new._info)
Gets the details for the object.
def _spin_up(self, images, duration): total = 0 for image in images: self.microbit.display.show(image) time.sleep(0.05) total += 0.05 if total >= duration: return remaining = duration - total self._full_speed_rumble(images[-2:], remaining) self.set_display()
Simulate the motors getting warmed up.
def use_comparative_gradebook_column_view(self): self._object_views['gradebook_column'] = COMPARATIVE for session in self._get_provider_sessions(): try: session.use_comparative_gradebook_column_view() except AttributeError: pass
Pass through to provider GradebookColumnLookupSession.use_comparative_gradebook_column_view
def reset_password(self, action_token, signed_data): try: action = "reset-password" user = get_user_by_action_token(action, action_token) if not user or not user.signed_data_match(signed_data, action): raise mocha_exc.AppError("Verification Invalid!") if request.method == "POST": password = request.form.get("password", "").strip() password_confirm = request.form.get("password_confirm", "").strip() if not password or password != password_confirm: raise exceptions.AuthError( "Password is missing or passwords don't match") user.change_password(password) user.set_email_verified(True) session_set_require_password_change(False) flash_success("Password updated successfully!") return redirect(__options__.get("login_view") or self.login) return {"action_token": action_token, "signed_data": signed_data} except (mocha_exc.AppError, exceptions.AuthError) as ex: flash_error(str(ex)) except Exception as e: logging.exception(e) flash_error("Unable to reset password") return redirect(self.login)
Reset the user password. It was triggered by LOST-PASSWORD
def sonTraceRootPath(): import sonLib.bioio i = os.path.abspath(sonLib.bioio.__file__) return os.path.split(os.path.split(os.path.split(i)[0])[0])[0]
function for finding external location
def requires_conversion(cls, fileset, file_format):
    """Checks whether the fileset needs converting to the requested file
    format. False when no format is requested or the fileset exposes no
    format attribute."""
    if file_format is None:
        return False
    try:
        current_format = fileset.format
    except AttributeError:
        return False
    return current_format != file_format
Checks whether the fileset matches the requested file format
def hsv_to_rgb(self,HSV): "hsv to linear rgb" gammaRGB = self._ABC_to_DEF_by_fn(HSV,hsv_to_rgb) return self._ungamma_rgb(gammaRGB)
hsv to linear rgb
def _build_block_element_list(self): return sorted( [e for e in self.block_elements.values() if not e.virtual], key=lambda e: e.priority, reverse=True )
Return a list of block elements, ordered from highest priority to lowest.
def save_config( self, cmd="configuration write", confirm=False, confirm_response="" ): output = self.enable() output += self.config_mode() output += self.send_command(cmd) output += self.exit_config_mode() return output
Save Config on Mellanox devices Enters and Leaves Config Mode
def cat(tensors, dim=0):
    """Efficient version of torch.cat that avoids a copy when the sequence
    contains a single tensor (it is returned as-is)."""
    assert isinstance(tensors, (list, tuple))
    return tensors[0] if len(tensors) == 1 else torch.cat(tensors, dim)
Efficient version of torch.cat that avoids a copy if there is only a single element in a list
def size(self):
    """Returns the total size in bytes of all files under the cache
    directory (self.dir), recursively."""
    total = 0
    for dirpath, _dirnames, filenames in os.walk(self.dir):
        total += sum(os.path.getsize(os.path.join(dirpath, name))
                     for name in filenames)
    return total
Returns the size of the cache in bytes.
def diff(self): latest = self.latest current = NetJsonParser(self.json()) return diff(current, latest)
shortcut to netdiff.diff
def extract_props(self, settings):
    """Extract all valuable properties to be displayed; parameters missing
    from *settings* map to None."""
    return {param: settings.get(param) for param in self.call_parameters}
Extract all valuable properties to be displayed
def message(self):
    """Render the message as newline-separated sender, recipients,
    subject, and body."""
    parts = [self.from_email, str(self.to), self.subject, self.body]
    return '\n'.join(parts)
Convert the message to a mime compliant email string
def examples(directory):
    """Generate example strategies into <directory>/examples by copying
    the bundled examples folder.

    Fixes: the "already exists" message previously read "Folder examples
    is exists."; OS errors other than EEXIST were silently swallowed and
    are now re-raised.
    """
    source_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "examples")
    try:
        shutil.copytree(source_dir, os.path.join(directory, "examples"))
    except OSError as e:
        if e.errno == errno.EEXIST:
            six.print_("Folder examples already exists.")
        else:
            raise
Generate example strategies to target folder
def _load_from_configs(self, filename): config_filename = os.path.join(self._config_path, filename) if os.path.exists(config_filename): try: f = open(config_filename, 'r') content = ''.join(f.readlines()) f.close() return content except Exception as err: raise err else: raise IOError("File not found: {}".format(config_filename))
Return content of file which located in configuration directory
def security(self): if self._resources is None: self.__init() if "security" in self._resources: url = self._url + "/security" return _security.Security(url=url, securityHandler=self._securityHandler, proxy_url=self._proxy_url, proxy_port=self._proxy_port, initialize=True) else: return None
returns an object to work with the site security
def plot_groups_unplaced(self, fout_dir=".", **kws_pltargs): hdrgos = self.grprobj.get_hdrgos_unplaced() pltargs = PltGroupedGosArgs(self.grprobj, fout_dir=fout_dir, **kws_pltargs) return self._plot_groups_hdrgos(hdrgos, pltargs)
Plot GO DAGs for groups of user GOs which are not in a section.
def isimplify(geoids):
    """Iteratively simplify until the set stops getting smaller.

    Bug fix: when the fixed point was not reached within 10 iterations the
    original fell off the loop and implicitly returned None; the last
    result is now returned instead.
    """
    current = list(geoids)
    for _ in range(10):
        simplified = simplify(current)
        if len(simplified) == len(current):
            return simplified
        current = simplified
    return current
Iteratively simplify until the set stops getting smaller.
def read(self, fileobj):
    """Seek to the atom's data offset and read its payload.

    Returns a (complete, payload) pair where *complete* is True when the
    full self.datalength bytes could be read.
    """
    fileobj.seek(self._dataoffset, 0)
    payload = fileobj.read(self.datalength)
    return len(payload) == self.datalength, payload
Return if all data could be read and the atom payload
def find_in_history(self, tocursor, start_idx, backward):
    """Find text 'tocursor' in history, starting from index 'start_idx'.

    Returns a (completion, index) pair:
      - whole-line mode (empty prefix, or self.hist_wholeline already set):
        the full history entry adjacent to start_idx in the search
        direction, or ("", len(history)) when stepping past the newest
        entry or history is empty;
      - prefix mode: the remainder (after the prefix) of the first entry
        starting with 'tocursor', or (None, start_idx) if none matches.
    """
    if start_idx is None:
        start_idx = len(self.history)
    # Search direction: -1 walks toward older entries, +1 toward newer.
    step = -1 if backward else 1
    idx = start_idx
    if len(tocursor) == 0 or self.hist_wholeline:
        # Whole-line mode: move to the adjacent entry in the chosen direction.
        idx += step
        if idx >= len(self.history) or len(self.history) == 0:
            # Walked past the newest entry: present an empty line.
            return "", len(self.history)
        elif idx < 0:
            idx = 0
        # Stay in whole-line mode for subsequent calls.
        self.hist_wholeline = True
        return self.history[idx], idx
    else:
        # Prefix mode: scan all entries, wrapping around, in the chosen
        # direction. NOTE(review): assumes history is non-empty here,
        # otherwise the modulo below divides by zero — confirm callers.
        for index in range(len(self.history)):
            idx = (start_idx+step*(index+1)) % len(self.history)
            entry = self.history[idx]
            if entry.startswith(tocursor):
                # Return only the text after the prefix, ready to insert.
                return entry[len(tocursor):], idx
        else:
            return None, start_idx
Find text 'tocursor' in history, starting from index 'start_idx'.
def argmin(attrs, inputs, proto_obj): axis = attrs.get('axis', 0) keepdims = attrs.get('keepdims', 1) argmin_op = symbol.argmin(inputs[0], axis=axis, keepdims=keepdims) cast_attrs = {'dtype': 'int64'} return 'cast', cast_attrs, argmin_op
Returns indices of the minimum values along an axis.
def read_stdin(self): text = sys.stdin.read() if sys.version_info[0] < 3 and text is not None: text = text.decode(sys.stdin.encoding or 'utf-8') return text
Reads STDIN until the end of input and returns a unicode string.
def sql(self, stmt, parameters=None, bulk_parameters=None): if stmt is None: return None data = _create_sql_payload(stmt, parameters, bulk_parameters) logger.debug( 'Sending request to %s with payload: %s', self.path, data) content = self._json_request('POST', self.path, data=data) logger.debug("JSON response for stmt(%s): %s", stmt, content) return content
Execute SQL stmt against the crate server.
def init(): from metapack.appurl import SearchUrl import metapack as mp from os import environ SearchUrl.initialize() mp.Downloader.context.update(environ)
Initialize features that are normally initialized in the CLI
def getUserInfo(self): userJson = self.httpGet(ReaderUrl.USER_INFO_URL) result = json.loads(userJson, strict=False) self.userId = result['userId'] return result
Returns a dictionary of user info that google stores.
def _exec_method(self, method, request, data, *args, **kw): if self._is_data_method(request): return method(data, request, *args, **kw) else: return method(request, *args, **kw)
Execute appropriate request handler.
def load_yaml(filename): with open(filename) as f: ydoc = yaml.safe_load(f.read()) return (ydoc, serialize_tojson(ydoc))
Loads a YAML-formatted file.
def _read_dataframes_100k(path): import pandas ratings = pandas.read_table(os.path.join(path, "u.data"), names=['userId', 'movieId', 'rating', 'timestamp']) movies = pandas.read_csv(os.path.join(path, "u.item"), names=['movieId', 'title'], usecols=[0, 1], delimiter='|', encoding='ISO-8859-1') return ratings, movies
reads in the movielens 100k dataset
def apply(self, value, input_ranges, backend=None): from .overlay import CompositeOverlay if backend is None: backend = Store.current_backend kwargs = {k: v for k, v in self.kwargs.items() if k != 'output_type'} if isinstance(value, CompositeOverlay) and len(value) == 1: value = value.values()[0] if self.transfer_parameters: plot_opts = Store.lookup_options(backend, value, 'plot').kwargs kwargs.update({k: v for k, v in plot_opts.items() if k in self.operation.params()}) transformed = self.operation(value, input_ranges=input_ranges, **kwargs) if self.transfer_options: Store.transfer_options(value, transformed, backend) return transformed
Apply the compositor on the input with the given input ranges.
def create_horizontal_plane(): v = np.array([ [1., 0., 0.], [-1., 0., 0.], [0., 0., 1.], [0., 0., -1.] ]) f = [[0, 1, 2], [3, 1, 0]] return Mesh(v=v, f=f)
Creates a horizontal plane.
def load_probe(name): if op.exists(name): path = name else: curdir = op.realpath(op.dirname(__file__)) path = op.join(curdir, 'probes/{}.prb'.format(name)) if not op.exists(path): raise IOError("The probe `{}` cannot be found.".format(name)) return MEA(probe=_read_python(path))
Load one of the built-in probes.
def ordered(start, edges, predicate=None, inverse=False):
    """Yield depth-first edges from a SciGraph response, starting at
    *start* and following subject -> object (or object -> subject when
    *inverse* is set), optionally filtered by *predicate*.

    Fix: removed a leftover ``print('scoop!')`` debug statement that fired
    for every edge filtered out by *predicate*.
    """
    s, o = 'sub', 'obj'
    if inverse:
        s, o = o, s
    for edge in edges:
        if predicate is not None and edge['pred'] != predicate:
            continue
        if edge[s] == start:
            yield edge
            # Recurse into the edge's far endpoint.
            yield from Graph.ordered(edge[o], edges, predicate=predicate)
Depth first edges from a SciGraph response.