code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def _get_csv_from_section(sections, crumbs, csvs):
    """Collect CSV data from each section's measurement tables and models.

    :param dict sections: Metadata sections
    :param str crumbs: Provenance crumbs used for logging and CSV naming
    :param dict csvs: Accumulated CSV data
    :return: Updated (sections, csvs)
    """
    logger_csvs.info("enter get_csv_from_section: {}".format(crumbs))
    _idx = 0
    try:
        for _name, _section in sections.items():
            if "measurementTable" in _section:
                sections[_name]["measurementTable"], csvs = _get_csv_from_table(_section["measurementTable"], "{}{}{}".format(crumbs, _idx, "measurement"), csvs)
            if "model" in _section:
                sections[_name]["model"], csvs = _get_csv_from_model(_section["model"], "{}{}{}".format(crumbs, _idx, "model"), csvs)
            _idx += 1
    except Exception as e:
        # Best-effort: log and print the failure but still return partial results.
        logger_csvs.error("get_csv_from_section: {}, {}".format(crumbs, e))
        print("Error: get_csv_from_section: {}, {}".format(crumbs, e))
    logger_csvs.info("exit get_csv_from_section: {}".format(crumbs))
    return sections, csvs
Get table name, variable name, and column values from paleo metadata :param dict sections: Metadata :param str crumbs: Crumbs :param dict csvs: Csv :return dict sections: Metadata :return dict csvs: Csv
def get(self, key):
    """Return the data stored under *key*.

    Raises hug.exceptions.StoreKeyNotFound if the key does not exist.
    """
    try:
        return self._data[key]
    except KeyError:
        raise StoreKeyNotFound(key)
Get data for given store key. Raise hug.exceptions.StoreKeyNotFound if key does not exist.
def temporary_tag(tag):
    """Temporarily tags the repo for the duration of the wrapped block.

    Applies *tag* (if truthy), yields control, and always removes the tag
    afterwards, even on error.
    NOTE(review): presumably decorated with @contextlib.contextmanager at
    the definition site — confirm.
    """
    if tag:
        CTX.repo.tag(tag)
    try:
        yield
    finally:
        if tag:
            CTX.repo.remove_tag(tag)
Temporarily tags the repo
def update(self, section, val, data):
    """Store *data* under (section, val), skipping None or unchanged values.

    This saves the .save step from writing defaults back out.

    :param section: (string) section name in the config, e.g. "agent"
    :param val: (string) value name in the section, e.g. "host"
    :param data: new value for `val`; ignored when None or equal to current
    """
    current = self.get(section, val)
    if data is None or current == data:
        return
    self.set(section, val, data)
Add a setting to the config, but if same as default or None then no action. This saves the .save writing the defaults `section` (mandatory) (string) the section name in the config E.g. `"agent"` `val` (mandatory) (string) the section name in the config E.g. `"host"` `data` (mandatory) (as appropriate) the new value for the `val`
def _address2long(address):
    """Convert an IPv4 or IPv6 address string to a long integer.

    Tries IPv4 first and falls back to IPv6 when parsing fails.
    """
    parsed = ipv4.ip2long(address)
    return parsed if parsed is not None else ipv6.ip2long(address)
Convert an address string to a long.
def transFringe(beta=None, rho=None):
    """Transport matrix of a fringe field.

    :param beta: angle of rotation of pole-face in [RAD]
    :param rho: bending radius in [m]
    :return: 6x6 numpy array (identity if either argument is missing)
    """
    m = np.eye(6, 6, dtype=np.float64)
    if None in (beta, rho):
        # Fixed message: it previously named 'theta', which is not a parameter.
        print("warning: 'beta', 'rho' should be positive float numbers.")
        return m
    m[1, 0] = np.tan(beta) / rho
    m[3, 2] = -np.tan(beta) / rho
    return m
Transport matrix of fringe field :param beta: angle of rotation of pole-face in [RAD] :param rho: bending radius in [m] :return: 6x6 numpy array
def common_options(*args, **kwargs):
    """Multi-purpose decorator applying a "base" set of options shared by all commands.

    Can be applied directly (@common_options) or with keyword arguments
    (@common_options(no_format_option=True)); detect_and_decorate resolves
    which form was used.
    """
    def decorate(f, **kwargs):
        # NOTE: this inner **kwargs shadows the outer one; detect_and_decorate
        # is expected to forward the original keyword arguments here.
        f = version_option(f)
        f = debug_option(f)
        f = verbose_option(f)
        f = click.help_option("-h", "--help")(f)
        if not kwargs.get("no_format_option"):
            f = format_option(f)
        if not kwargs.get("no_map_http_status_option"):
            f = map_http_status_option(f)
        return f
    return detect_and_decorate(decorate, args, kwargs)
This is a multi-purpose decorator for applying a "base" set of options shared by all commands. It can be applied either directly, or given keyword arguments. Usage: >>> @common_options >>> def mycommand(abc, xyz): >>> ... or >>> @common_options(no_format_option=True) >>> def mycommand(abc, xyz): >>> ...
def _mix(color1, color2, weight=0.5, **kwargs):
    """Mixes two colors together (Sass-style mix).

    The effective weight folds together the caller-supplied weight and the
    colors' alpha difference, so a more opaque color contributes more.

    :param weight: fraction of color1 in the result, clamped to [0, 1]
    """
    weight = float(weight)
    c1 = color1.value
    c2 = color2.value
    # Clamp weight to [0, 1].
    p = 0.0 if weight < 0 else 1.0 if weight > 1 else weight
    w = p * 2 - 1          # weight rescaled to [-1, 1]
    a = c1[3] - c2[3]      # alpha difference
    # Guard against division by zero when w * a == -1.
    w1 = ((w if (w * a == -1) else (w + a) / (1 + w * a)) + 1) / 2.0
    w2 = 1 - w1
    q = [w1, w1, w1, p]        # per-channel weights for color1; alpha uses p
    r = [w2, w2, w2, 1 - p]    # per-channel weights for color2
    return ColorValue([c1[i] * q[i] + c2[i] * r[i] for i in range(4)])
Mixes two colors together.
def start(self, network_name, trunk_name, trunk_type):
    """Start the DHCP server process.

    :param network_name: name of the internal network the DHCP server attaches to
    :param trunk_name: name of the internal network trunk
    :param trunk_type: type of the internal network trunk
    :raises OleErrorFail: if the process fails to start
    """
    checks = (
        ("network_name", network_name),
        ("trunk_name", trunk_name),
        ("trunk_type", trunk_type),
    )
    for arg_name, arg_value in checks:
        if not isinstance(arg_value, basestring):
            raise TypeError("%s can only be an instance of type basestring" % arg_name)
    self._call("start", in_p=[network_name, trunk_name, trunk_type])
Starts DHCP server process. in network_name of type str Name of internal network DHCP server should attach to. in trunk_name of type str Name of internal network trunk. in trunk_type of type str Type of internal network trunk. raises :class:`OleErrorFail` Failed to start the process.
def save_assessment(self, assessment_form, *args, **kwargs):
    """Pass through to provider AssessmentAdminSession: update when the form
    targets an existing assessment, otherwise create a new one."""
    handler = (self.update_assessment if assessment_form.is_for_update()
               else self.create_assessment)
    return handler(assessment_form, *args, **kwargs)
Pass through to provider AssessmentAdminSession.update_assessment
def revision_list(
    request, template_name='wakawaka/revision_list.html', extra_context=None
):
    """Displays a list of all recent revisions."""
    context = {'revision_list': Revision.objects.all()}
    if extra_context:
        context.update(extra_context)
    return render(request, template_name, context)
Displays a list of all recent revisions.
def has_name_in(cls, names):
    """Build a filter matching authors having any of the given names."""
    hashed_names = {cls.hash_name(name) for name in names}
    return cls.sha512.in_(hashed_names)
Build a filter if the author has any of the given names.
def remove_tweet(self, id):
    """Delete a tweet.

    :param id: ID of the tweet in question
    :return: True on success, False if the tweet no longer exists or belongs
        to another user; re-raises any other Twitter API error.
    """
    try:
        self._client.destroy_status(id=id)
    except TweepError as e:
        if e.api_code in (TWITTER_PAGE_DOES_NOT_EXISTS_ERROR,
                          TWITTER_DELETE_OTHER_USER_TWEET):
            return False
        raise
    return True
Delete a tweet. :param id: ID of the tweet in question :return: True if success, False otherwise
def backup(id):
    """Backup the database to S3 and return a URL for the uploaded dump.

    Note: the parameter `id` shadows the builtin; kept for interface
    compatibility.
    """
    filename = dump_database(id)
    key = "{}.dump".format(id)
    bucket = user_s3_bucket()
    bucket.upload_file(filename, key)
    return _generate_s3_url(bucket, key)
Backup the database to S3.
def get_connection(self, **kwargs):
    """Return a serial connection implementation suitable for this protocol.

    Raises ``RuntimeError`` if no implementation exists for the protocol.
    Note: no actual connection is opened; this returns an implementation
    of a connection pattern.
    """
    if self.is_rtscts():
        return RTSCTSConnection(self, **kwargs)
    if self.is_dsrdtr():
        return DSRDTRConnection(self, **kwargs)
    raise RuntimeError('Serial protocol "%s" is not available.' % (self.protocol))
Return a serial connection implementation suitable for the specified protocol. Raises ``RuntimeError`` if there is no implementation for the given protocol. .. warning:: This may be a little bit confusing since there is no effective connection but an implementation of a connection pattern.
def comicPageLink(self, comic, url, prevUrl):
    """Emit an event informing every registered handler about links between
    comic pages. Should be overridden in subclass."""
    for event_handler in _handlers:
        event_handler.comicPageLink(comic, url, prevUrl)
Emit an event to inform the handler about links between comic pages. Should be overridden in subclass.
def header(cls, name, type_=Type.String, description=None, default=None, required=None, **options):
    """Define a header parameter."""
    location = In.Header
    return cls(name, location, type_, None, description,
               required=required, default=default, **options)
Define a header parameter.
def _scipy_distribution_positional_args_from_dict(distribution, params): params['loc'] = params.get('loc', 0) if 'scale' not in params: params['scale'] = 1 if distribution == 'norm': return params['mean'], params['std_dev'] elif distribution == 'beta': return params['alpha'], params['beta'], params['loc'], params['scale'] elif distribution == 'gamma': return params['alpha'], params['loc'], params['scale'] elif distribution == 'uniform': return params['min'], params['max'] elif distribution == 'chi2': return params['df'], params['loc'], params['scale'] elif distribution == 'expon': return params['loc'], params['scale']
Helper function that returns positional arguments for a scipy distribution using a dict of parameters. See the `cdf()` function here https://docs.scipy.org/doc/scipy/reference/generated/scipy.stats.beta.html#Methods\ to see an example of scipy's positional arguments. This function returns the arguments specified by the \ scipy.stat.distribution.cdf() for tha distribution. Args: distribution (string): \ The scipy distribution name. params (dict): \ A dict of named parameters. Raises: AttributeError: \ If an unsupported distribution is provided.
def adversary(self, name, owner=None, **kwargs):
    """Create the Adversary TI object.

    Args:
        name: Adversary name.
        owner: Owner of the TI object (optional).
        **kwargs: Forwarded to the Adversary constructor.

    Return:
        Adversary: the created TI object.
    """
    return Adversary(self.tcex, name, owner=owner, **kwargs)
Create the Adversary TI object. Args: owner: name: **kwargs: Return:
def get_nts(self, fin_davidchart):
    """Read a DAVID Chart file; store each data line in a namedtuple.

    :param fin_davidchart: path to a tab-separated DAVID chart file
    :return: list of namedtuples, one per data row after the header
    """
    nts = []
    with open(fin_davidchart) as ifstrm:
        hdr_seen = False
        for line in ifstrm:
            line = line.rstrip()
            flds = line.split('\t')
            if hdr_seen:
                ntd = self._init_nt(flds)
                nts.append(ntd)
            else:
                # Data rows start after the header line beginning with 'Category'.
                if line[:8] == 'Category':
                    assert len(flds) == 13, len(flds)
                    hdr_seen = True
    sys.stdout.write(" READ {N:5} GO IDs from DAVID Chart: {TSV}\n".format(
        N=len(nts), TSV=fin_davidchart))
    return nts
Read DAVID Chart file. Store each line in a namedtuple.
def property_data_zpool():
    """Return a dict of zpool properties.

    .. note::
        Each property entry carries:
        - edit : bool - editable after pool creation
        - type : str - bool, bool_alt, size, numeric, or string
        - values : str - list of possible values

    .. warning::
        Probed from 'zpool get' output plus hardcoded supplemental data.
    """
    property_data = _property_parse_cmd(_zpool_cmd(), {
        'allocated': 'alloc',
        'autoexpand': 'expand',
        'autoreplace': 'replace',
        'listsnapshots': 'listsnaps',
        'fragmentation': 'frag',
    })
    # Supplemental properties not reported by 'zpool get'.
    hardcoded_extras = {
        'size': ('<size>', [
            'capacity-alloc', 'capacity-free',
            'operations-read', 'operations-write',
            'bandwith-read', 'bandwith-write',
            'read', 'write',
        ]),
        'numeric': ('<count>', ['cksum', 'cap']),
    }
    for prop_type, (values, props) in hardcoded_extras.items():
        for prop in props:
            property_data[prop] = {
                'edit': False,
                'type': prop_type,
                'values': values,
            }
    return property_data
Return a dict of zpool properties .. note:: Each property will have an entry with the following info: - edit : boolean - is this property editable after pool creation - type : str - either bool, bool_alt, size, numeric, or string - values : str - list of possible values .. warning:: This data is probed from the output of 'zpool get' with some supplemental data that is hardcoded. There is no better way to get this information aside from reading the code.
def q(cls, **kwargs):
    """Create an iterator over the members of this class matching the given filters.

    NOTE(review): **kwargs filters are accepted but not used in this body —
    presumably QuerySet applies them; confirm.
    """
    redis = cls.get_redis()
    return QuerySet(cls, redis.sscan_iter(cls.members_key()))
Creates an iterator over the members of this class that applies the given filters and returns only the elements matching them
def floor(cls, x: 'TensorFluent') -> 'TensorFluent':
    """Returns a TensorFluent for the floor function.

    Args:
        x: The input fluent.

    Returns:
        A TensorFluent wrapping tf.floor with float32 dtype.
    """
    return cls._unary_op(x, tf.floor, tf.float32)
Returns a TensorFluent for the floor function. Args: x: The input fluent. Returns: A TensorFluent wrapping the floor function.
def setup_logging(name, prefix="trademanager", cfg=None):
    """Create a logger based on the given configuration.

    Accepts LOGFILE and LOGLEVEL settings from the [log] section of *cfg*.

    :param name: the name of the tapp to log
    :param prefix: directory prefix under /var/log for the default log file
    :param cfg: the configuration object with logging info (optional)
    :return: the configured logger
    """
    logfile = "/var/log/%s/%s_tapp.log" % (prefix, name)
    loglevel = logging.INFO
    if cfg is not None:
        cfg_file = cfg.get('log', 'LOGFILE')
        if cfg_file is not None and cfg_file != "":
            logfile = cfg_file
        cfg_level = cfg.get('log', 'LOGLEVEL')
        if cfg_level is not None:
            loglevel = cfg_level
    logging.basicConfig(filename=logfile, level=loglevel)
    return logging.getLogger(name)
Create a logger, based on the given configuration. Accepts LOGFILE and LOGLEVEL settings. :param name: the name of the tapp to log :param cfg: The configuration object with logging info. :return: The configured logger for the given name
def getAllFtpConnections(self):
    """Return the dictionary of active ftp connections, optionally logging
    a numbered summary of them."""
    msg = "Current ftp connections:\n"
    for idx, conn_name in enumerate(self.ftpList, start=1):
        msg += "{}. {} {}\n".format(idx, conn_name, self.ftpList[conn_name])
    if self.printOutput:
        logger.info(msg)
    return self.ftpList
Returns a dictionary containing active ftp connections.
def make_remote_image_result(annotations=None, labels=None):
    """Instantiate BuildResult for an image that was not built locally."""
    return BuildResult(image_id=BuildResult.REMOTE_IMAGE,
                       annotations=annotations, labels=labels)
Instantiate BuildResult for image not built locally.
def wait_callback(self, callback, msg_type=None, timeout=1.0):
    """Wait (up to *timeout* seconds) for an SBP message, invoking *callback*.

    Parameters
    ----------
    callback : fn
        Callback function.
    msg_type : int | iterable
        Message type(s) to register the callback against; None means global.
    timeout : float
        Maximum waiting period in seconds.
    """
    event = threading.Event()
    def cb(msg, **metadata):
        callback(msg, **metadata)
        event.set()
    self.add_callback(cb, msg_type)
    # Returns after the first matching message or when the timeout elapses;
    # the temporary callback is always unregistered afterwards.
    event.wait(timeout)
    self.remove_callback(cb, msg_type)
Wait for a SBP message with a callback. Parameters ---------- callback : fn Callback function msg_type : int | iterable Message type to register callback against. Default `None` means global callback. Iterable type adds the callback to all the message types. timeout : float Waiting period
def serialize(self):
    """Serialize the Peer data as a simple, deterministic JSON map string."""
    payload = {
        "name": self.name,
        "ip": self.ip,
        "port": self.port,
    }
    return json.dumps(payload, sort_keys=True)
Serializes the Peer data as a simple JSON map string.
def filter_table(table, *column_filters):
    """Apply one or more column slice filters to a `Table`.

    Each filter is either a string like ``'snr > 10'`` or a tuple
    ``(<column>, <operator>, <operand>)``; all filters are ANDed together.

    :return: a view of *table* containing only the matching rows
    """
    # Start with all rows kept, then AND in each filter's mask.
    keep = numpy.ones(len(table), dtype=bool)
    for name, op_func, operand in parse_column_filters(*column_filters):
        col = table[name].view(numpy.ndarray)
        keep &= op_func(col, operand)
    return table[keep]
Apply one or more column slice filters to a `Table` Multiple column filters can be given, and will be applied concurrently Parameters ---------- table : `~astropy.table.Table` the table to filter column_filter : `str`, `tuple` a column slice filter definition, in one of two formats: - `str` - e.g. ``'snr > 10`` - `tuple` - ``(<column>, <operator>, <operand>)``, e.g. ``('snr', operator.gt, 10)`` multiple filters can be given and will be applied in order Returns ------- table : `~astropy.table.Table` a view of the input table with only those rows matching the filters Examples -------- >>> filter(my_table, 'snr>10', 'frequency<1000') custom operations can be defined using filter tuple definitions: >>> from gwpy.table.filters import in_segmentlist >>> filter(my_table, ('time', in_segmentlist, segs))
def verify_challenge(uri):
    """Poll *uri* until the domain challenge is verified, else raise ValueError."""
    while True:
        try:
            resp = urlopen(uri)
            challenge_status = json.loads(resp.read().decode('utf8'))
        except IOError as e:
            # NOTE(review): plain IOError has no .code/.read; this assumes
            # urllib.error.HTTPError — confirm.
            raise ValueError("Error checking challenge: {0} {1}".format(
                e.code, json.loads(e.read().decode('utf8'))))
        if challenge_status['status'] == "pending":
            # Not decided yet: back off briefly and poll again.
            time.sleep(2)
        elif challenge_status['status'] == "valid":
            LOGGER.info("Domain verified!")
            break
        else:
            raise ValueError("Domain challenge did not pass: {0}".format(
                challenge_status))
Loop until our challenge is verified, else fail.
def rpc_start(working_dir, port, subdomain_index=None, thread=True):
    """Start the global RPC server thread and return it.

    :param thread: when True, the server thread is started before returning
    """
    server = BlockstackdRPCServer(
        working_dir, port, subdomain_index=subdomain_index)
    log.debug("Starting RPC on port {}".format(port))
    if thread:
        server.start()
    return server
Start the global RPC server thread Returns the RPC server thread
def cont(self, event = None):
    """Resumes execution after processing a debug event.

    @see: dispatch(), loop(), wait()
    @type  event: L{Event}
    @param event: (Optional) Event object returned by L{wait}; defaults to
        the last event processed.
    @raise WindowsError: Raises an exception on error.
    """
    if event is None:
        event = self.lastEvent
    if not event:
        return
    dwProcessId = event.get_pid()
    dwThreadId = event.get_tid()
    dwContinueStatus = event.continueStatus
    if self.is_debugee(dwProcessId):
        try:
            if self.system.has_process(dwProcessId):
                aProcess = self.system.get_process(dwProcessId)
            else:
                aProcess = Process(dwProcessId)
            # Flush so code patches/breakpoints take effect before resuming.
            aProcess.flush_instruction_cache()
        except WindowsError:
            # Best-effort: the process may already be gone.
            pass
        win32.ContinueDebugEvent(dwProcessId, dwThreadId, dwContinueStatus)
    # Clear the cached event once it has been continued.
    if event == self.lastEvent:
        self.lastEvent = None
Resumes execution after processing a debug event. @see: dispatch(), loop(), wait() @type event: L{Event} @param event: (Optional) Event object returned by L{wait}. @raise WindowsError: Raises an exception on error.
def _parse_from_incar(filename, key):
    """Helper function to parse a parameter from an INCAR beside *filename*.

    Returns the INCAR value for *key*, or None when no INCAR file exists or
    the key is absent. Note: only the first file matching 'INCAR' in the
    directory listing is consulted; the loop returns immediately either way.
    """
    dirname = os.path.dirname(filename)
    for f in os.listdir(dirname):
        if re.search(r"INCAR", f):
            warnings.warn("INCAR found. Using " + key + " from INCAR.")
            incar = Incar.from_file(os.path.join(dirname, f))
            if key in incar:
                return incar[key]
            else:
                return None
    return None
Helper function to parse a parameter from the INCAR.
def _valid(m, comment=VALID_RESPONSE, out=None):
    """Return *m* marked with valid status (status=True)."""
    return _set_status(m, status=True, comment=comment, out=out)
Return valid status.
def OnActivateCard(self, card):
    """Called when a card is activated by double-clicking on the card or
    reader tree control or toolbar.

    Updates the feedback label and enables the transmit button.
    """
    SimpleSCardAppEventObserver.OnActivateCard(self, card)
    self.feedbacktext.SetLabel('Activated card: ' + repr(card))
    self.transmitbutton.Enable()
Called when a card is activated by double-clicking on the card or reader tree control or toolbar. In this sample, we just connect to the card on the first activation.
def TimeFromTicks(ticks, tz=None):
    """Construct a DB-API time value from the given ticks value.

    :param ticks: seconds since the epoch (see the stdlib time module)
    :param tz: (optional) datetime.tzinfo to use for conversion
    :return: datetime.time represented by ticks
    """
    return datetime.datetime.fromtimestamp(ticks, tz=tz).timetz()
Construct a DB-API time value from the given ticks value. :type ticks: float :param ticks: a number of seconds since the epoch; see the documentation of the standard Python time module for details. :type tz: :class:`datetime.tzinfo` :param tz: (Optional) time zone to use for conversion :rtype: :class:`datetime.time` :returns: time represented by ticks.
def directory(self, key):
    """Retrieve directory entries for the given key, coercing the key to its
    'directory' instance first; returns [] when nothing is stored."""
    lookup_key = key if key.name == 'directory' else key.instance('directory')
    return self.get(lookup_key) or []
Retrieves directory entries for given key.
def identify(self, req, resp, resource, uri_kwargs):
    """Identify a user from a Basic Authorization header.

    Returns (username, password) on success, None when no Basic credentials
    are present, and raises HTTPBadRequest for malformed headers.
    """
    header = req.get_header("Authorization", False)
    if not header:
        return None
    auth = header.split(" ")
    if auth[0].lower() != 'basic':
        return None
    if len(auth) != 2:
        raise HTTPBadRequest(
            "Invalid Authorization header",
            "The Authorization header for Basic auth should be in form:\n"
            "Authorization: Basic <base64-user-pass>"
        )
    try:
        decoded = base64.b64decode(auth[1]).decode()
    except (TypeError, UnicodeDecodeError, binascii.Error):
        raise HTTPBadRequest(
            "Invalid Authorization header",
            "Credentials for Basic auth not correctly base64 encoded."
        )
    # Split on the first ':' only — passwords may themselves contain colons.
    username, _, password = decoded.partition(":")
    return username, password
Identify user using Authenticate header with Basic auth.
def count_signatures(self, conditions={}):
    """Count all signatures matching the given query conditions.

    :param conditions: dict of query parameters; the 'ids' value (a list)
        is joined into a comma-separated string.
    NOTE: the mutable default ``conditions={}`` is only read here, but
    callers should still pass their own dict.
    """
    url = self.SIGNS_COUNT_URL + '?'
    for key, value in conditions.items():
        # Bug fix: `key is 'ids'` compared identity, not equality, and only
        # worked by accident of string interning. Use '==' instead.
        if key == 'ids':
            value = ",".join(value)
        url += '&%s=%s' % (key, value)
    connection = Connection(self.token)
    connection.set_url(self.production, url)
    return connection.get_request()
Count all signatures
def fix_command(known_args):
    """Fixes the previous command. Used when `thefuck` is called without arguments."""
    settings.init(known_args)
    with logs.debug_time('Total'):
        logs.debug(u'Run with settings: {}'.format(pformat(settings)))
        raw_command = _get_raw_command(known_args)
        try:
            command = types.Command.from_raw_script(raw_command)
        except EmptyCommand:
            logs.debug('Empty command, nothing to do')
            return
        corrected_commands = get_corrected_commands(command)
        selected_command = select_command(corrected_commands)
        if selected_command:
            selected_command.run(command)
        else:
            # No correction chosen: signal failure to the shell wrapper.
            sys.exit(1)
Fixes previous command. Used when `thefuck` called without arguments.
def applyTuple(self, tuple, right, env):
    """Apply a tuple to something else.

    *right* must be a single-element argument list; its only element is
    passed to the tuple.
    """
    if len(right) != 1:
        raise exceptions.EvaluationError('Tuple (%r) can only be applied to one argument, got %r' % (self.left, self.right))
    return tuple(right[0])
Apply a tuple to something else.
def is_redundant_union_item(first, other):
    """If a union has both items, is the first one redundant?

    For example, if first is 'str' and the other is 'Text', return True.
    If items are equal, return False.
    """
    if isinstance(first, ClassType) and isinstance(other, ClassType):
        # Known subsumption pairs: str < Text, bool < int, int < float.
        if first.name == 'str' and other.name == 'Text':
            return True
        elif first.name == 'bool' and other.name == 'int':
            return True
        elif first.name == 'int' and other.name == 'float':
            return True
        elif (first.name in ('List', 'Dict', 'Set') and
                other.name == first.name):
            # A bare generic is subsumed by a parameterized one; matching
            # arities are redundant when each argument is equal or the
            # other side's argument is Any.
            if not first.args and other.args:
                return True
            elif len(first.args) == len(other.args) and first.args:
                result = all(first_arg == other_arg or other_arg == AnyType()
                             for first_arg, other_arg
                             in zip(first.args, other.args))
                return result
    return False
If union has both items, is the first one redundant? For example, if first is 'str' and the other is 'Text', return True. If items are equal, return False.
def radius(d, offsets, motor_ofs):
    """Return the radius for data point *d* given the offsets.

    *d* is a (mag, motor) pair; the corrected vector's .length() is returned.
    """
    mag, motor = d
    corrected = mag + offsets + motor * motor_ofs
    return corrected.length()
return radius give data point and offsets
def bearing(self, format='numeric'):
    """Calculate bearing between locations in segments.

    Args:
        format (str): Format of the bearing string to return

    Returns:
        list of list of float: Groups of bearings between points in segments;
        segments with fewer than two points yield an empty group.
    """
    return [segment.bearing(format) if len(segment) >= 2 else []
            for segment in self]
Calculate bearing between locations in segments. Args: format (str): Format of the bearing string to return Returns: list of list of float: Groups of bearings between points in segments
def transaction_effects(self, tx_hash, cursor=None, order='asc', limit=10):
    """All effects that occurred as a result of a given transaction.

    `GET /transactions/{hash}/effects{?cursor,limit,order}`

    :param str tx_hash: The hex-encoded transaction hash.
    :param int cursor: A paging token, specifying where to start returning records from.
    :param str order: The order in which to return rows, "asc" or "desc".
    :param int limit: Maximum number of records to return.
    :return: A single transaction's effects.
    :rtype: dict
    """
    endpoint = '/transactions/{tx_hash}/effects'.format(tx_hash=tx_hash)
    # Name-mangled private helper on the enclosing class builds the params.
    params = self.__query_params(cursor=cursor, order=order, limit=limit)
    return self.query(endpoint, params)
This endpoint represents all effects that occurred as a result of a given transaction. `GET /transactions/{hash}/effects{?cursor,limit,order} <https://www.stellar.org/developers/horizon/reference/endpoints/effects-for-transaction.html>`_ :param str tx_hash: The hex-encoded transaction hash. :param int cursor: A paging token, specifying where to start returning records from. :param str order: The order in which to return rows, "asc" or "desc". :param int limit: Maximum number of records to return. :return: A single transaction's effects. :rtype: dict
def _unhash(hashed, alphabet): number = 0 len_alphabet = len(alphabet) for character in hashed: position = alphabet.index(character) number *= len_alphabet number += position return number
Restores a number tuple from hashed using the given `alphabet` index.
def _InstallInstallers(self):
    """Install the installer built by RepackTemplates and verify the install."""
    # Pops the single expected dbg_*_amd64.exe match; IndexError if none built.
    installer_amd64 = glob.glob(
        os.path.join(args.output_dir, "dbg_*_amd64.exe")).pop()
    self._CleanupInstall()
    # Requires the installer to run non-interactively.
    subprocess.check_call([installer_amd64])
    self._CheckInstallSuccess()
Install the installer built by RepackTemplates.
def load(self):
    """Extract tabular data as TableData instances from a SQLite database file.

    :return: Loaded table data iterator.
    :rtype: TableData iterator
    :raises pytablereader.DataError:
        If the SQLite database file data is invalid or empty.
    """
    self._validate()
    formatter = SqliteTableFormatter(self.source)
    formatter.accept(self)
    return formatter.to_table_data()
Extract tabular data as |TableData| instances from a SQLite database file. |load_source_desc_file| :return: Loaded table data iterator. |load_table_name_desc| =================== ============================================== Format specifier Value after the replacement =================== ============================================== ``%(filename)s`` |filename_desc| ``%(key)s`` ``%(format_name)s%(format_id)s`` ``%(format_name)s`` ``"sqlite"`` ``%(format_id)s`` |format_id_desc| ``%(global_id)s`` |global_id| =================== ============================================== :rtype: |TableData| iterator :raises pytablereader.DataError: If the SQLite database file data is invalid or empty.
def pass_outflow_v1(self):
    """Update the outlet link sequence |dam_outlets.Q| by adding the
    computed outflow flux."""
    flu = self.sequences.fluxes.fastaccess
    out = self.sequences.outlets.fastaccess
    out.q[0] += flu.outflow
Update the outlet link sequence |dam_outlets.Q|.
def reply(self, message, text, opts=None):
    """Reply to the sender of the provided message with a message containing
    the provided text.

    :param message: the message to reply to
    :param text: the text to reply with
    :param opts: A dictionary of additional values to add to metadata
    :return: None
    :raises RuntimeError: if the destination actor does not exist
    """
    metadata = Metadata(source=self.actor_urn, dest=message['metadata']['source']).__dict__
    metadata['opts'] = opts
    message = Message(text=text, metadata=metadata, should_log=message['should_log']).__dict__
    dest_actor = ActorRegistry.get_by_urn(message['metadata']['dest'])
    if dest_actor is not None:
        dest_actor.tell(message)
    else:
        # Bug fix: `raise("...")` raised a bare string, which is itself a
        # TypeError; raise a real exception instead.
        raise RuntimeError("Tried to send message to nonexistent actor")
Reply to the sender of the provided message with a message \ containing the provided text. :param message: the message to reply to :param text: the text to reply with :param opts: A dictionary of additional values to add to metadata :return: None
def head(self, *args, **kwargs):
    """Executes an HTTP HEAD via the underlying session.

    :Parameters:
       - `args`: Non-keyword arguments forwarded to session.head
       - `kwargs`: Keyword arguments, augmented through get_kwargs
    """
    return self.session.head(*args, **self.get_kwargs(**kwargs))
Executes an HTTP HEAD. :Parameters: - `args`: Non-keyword arguments - `kwargs`: Keyword arguments
def main():
    """The main entrypoint for the Opentrons robot API server stack.

    Parses CLI arguments, runs the server (blocking until shutdown), then
    exits the argument parser with a "Stopped" message.
    """
    arg_parser = ArgumentParser(
        description="Opentrons robot software",
        parents=[build_arg_parser()])
    args = arg_parser.parse_args()
    run(**vars(args))
    arg_parser.exit(message="Stopped\n")
The main entrypoint for the Opentrons robot API server stack. This function - creates and starts the server for both the RPC routes handled by :py:mod:`opentrons.server.rpc` and the HTTP routes handled by :py:mod:`opentrons.server.http` - initializes the hardware interaction handled by either :py:mod:`opentrons.legacy_api` or :py:mod:`opentrons.hardware_control` This function does not return until the server is brought down.
def get_date_range_this_year(now=None):
    """Return the aware (start, end) datetimes bounding the current school year.

    The school year runs Aug 1 through Jul 1; dates up to and including
    settings.YEAR_TURNOVER_MONTH belong to the year that started the
    previous August.
    """
    if now is None:
        now = datetime.datetime.now().date()
    start_year = now.year if now.month > settings.YEAR_TURNOVER_MONTH else now.year - 1
    date_start = datetime.datetime(start_year, 8, 1)
    date_end = datetime.datetime(start_year + 1, 7, 1)
    return timezone.make_aware(date_start), timezone.make_aware(date_end)
Return the starting and ending date of the current school year.
def _check_1st_line(line, **kwargs): components = kwargs.get("components", ()) max_first_line = kwargs.get("max_first_line", 50) errors = [] lineno = 1 if len(line) > max_first_line: errors.append(("M190", lineno, max_first_line, len(line))) if line.endswith("."): errors.append(("M191", lineno)) if ':' not in line: errors.append(("M110", lineno)) else: component, msg = line.split(':', 1) if component not in components: errors.append(("M111", lineno, component)) return errors
First line check. Check that the first line has a known component name followed by a colon and then a short description of the commit. :param line: first line :type line: str :param components: list of known component names :type line: list :param max_first_line: maximum length of the first line :type max_first_line: int :return: errors as in (code, line number, *args) :rtype: list
def check_shapes(self):
    """Check all model matrices/vectors have dimensions consistent with dx/dy.

    NOTE(review): `error_msg` is presumably defined at module level — confirm.
    Asserts are stripped under -O, so this is a debug-time check only.
    """
    assert self.covX.shape == (self.dx, self.dx), error_msg
    assert self.covY.shape == (self.dy, self.dy), error_msg
    assert self.F.shape == (self.dx, self.dx), error_msg
    assert self.G.shape == (self.dy, self.dx), error_msg
    assert self.mu0.shape == (self.dx,), error_msg
    assert self.cov0.shape == (self.dx, self.dx), error_msg
Check all dimensions are correct.
def _link_vertex_and_edge_types(self):
    """For each edge, link it to the vertex types it connects to each other."""
    for edge_class_name in self._edge_class_names:
        edge_element = self._elements[edge_class_name]
        if (EDGE_SOURCE_PROPERTY_NAME not in edge_element.properties or
                EDGE_DESTINATION_PROPERTY_NAME not in edge_element.properties):
            if edge_element.abstract:
                # Abstract edges may legitimately leave endpoints undefined.
                continue
            else:
                raise AssertionError(u'Found a non-abstract edge class with undefined '
                                     u'endpoint types: {}'.format(edge_element))
        from_class_name = edge_element.properties[EDGE_SOURCE_PROPERTY_NAME].qualifier
        to_class_name = edge_element.properties[EDGE_DESTINATION_PROPERTY_NAME].qualifier
        edge_schema_element = self._elements[edge_class_name]
        # Link every subclass of each endpoint type, since subclasses may
        # also appear at either end of this edge.
        for from_class in self._subclass_sets[from_class_name]:
            from_schema_element = self._elements[from_class]
            from_schema_element.out_connections.add(edge_class_name)
            edge_schema_element.in_connections.add(from_class)
        for to_class in self._subclass_sets[to_class_name]:
            to_schema_element = self._elements[to_class]
            edge_schema_element.out_connections.add(to_class)
            to_schema_element.in_connections.add(edge_class_name)
For each edge, link it to the vertex types it connects to each other.
def check_duplicate_axis(self, ds):
    """Checks that no variable contains two coordinates defining the same axis
    (CF Chapter 5, paragraph 6).

    :param netCDF4.Dataset ds: An open netCDF dataset
    :rtype: list
    :return: List of compliance_checker.base.Result
    """
    ret_val = []
    geophysical_variables = self._find_geophysical_vars(ds)
    for name in geophysical_variables:
        no_duplicates = TestCtx(BaseCheck.HIGH, self.section_titles['5'])
        axis_map = cfutil.get_axis_map(ds, name)
        axes = []
        # Walk every coordinate of every axis; assert each axis value
        # appears at most once for this variable.
        for axis, coordinates in axis_map.items():
            for coordinate in coordinates:
                axis_attr = getattr(ds.variables[coordinate], 'axis', None)
                no_duplicates.assert_true(axis_attr is None or axis_attr not in axes,
                                          "'{}' has duplicate axis {} defined by {}".format(name, axis_attr, coordinate))
                if axis_attr and axis_attr not in axes:
                    axes.append(axis_attr)
        ret_val.append(no_duplicates.to_result())
    return ret_val
Checks that no variable contains two coordinates defining the same axis. Chapter 5 paragraph 6 If an axis attribute is attached to an auxiliary coordinate variable, it can be used by applications in the same way the `axis` attribute attached to a coordinate variable is used. However, it is not permissible for a [geophysical variable] to have both a coordinate variable and an auxiliary coordinate variable, or more than one of either type of variable, having an `axis` attribute with any given value e.g. there must be no more than one axis attribute for X for any [geophysical variable]. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: compliance_checker.base.Result :return: List of results
def _combine_season_stats(self, table_rows, career_stats, all_stats_dict):
    """Combine all stats for each season into a single dictionary.

    Parameters
    ----------
    table_rows : generator
        Rows of a stats table.
    career_stats : generator
        Rows of a stats table footer (career totals live in the footer).
    all_stats_dict : dictionary
        Maps season string (e.g. '2017') to {'data': <concatenated html>}.

    Returns
    -------
    dictionary
        The updated all_stats_dict including this table's rows.
    """
    most_recent_season = self._most_recent_season
    if not table_rows:
        # Player has no stats for this particular table.
        table_rows = []
    for row in table_rows:
        season = self._parse_season(row)
        try:
            all_stats_dict[season]['data'] += str(row)
        except KeyError:
            all_stats_dict[season] = {'data': str(row)}
        most_recent_season = season
    self._most_recent_season = most_recent_season
    if not career_stats:
        return all_stats_dict
    try:
        # KeyError fires on the ['Career'] lookup before next() is consumed.
        all_stats_dict['Career']['data'] += str(next(career_stats))
    except KeyError:
        try:
            all_stats_dict['Career'] = {'data': str(next(career_stats))}
        except StopIteration:
            return all_stats_dict
    return all_stats_dict
Combine all stats for each season. Since all of the stats are spread across multiple tables, they should be combined into a single field which can be used to easily query stats at once. Parameters ---------- table_rows : generator A generator where each element is a row in a stats table. career_stats : generator A generator where each element is a row in the footer of a stats table. Career stats are kept in the footer, hence the usage. all_stats_dict : dictionary A dictionary of all stats separated by season where each key is the season ``string``, such as '2017', and the value is a ``dictionary`` with a ``string`` of 'data' and ``string`` containing all of the data. Returns ------- dictionary Returns an updated version of the passed all_stats_dict which includes more metrics from the provided table.
def reward_bonus(self, assignment_id, amount, reason):
    """Log bonus info for the assignment; no actual payment is issued here."""
    logger.info(
        'Award ${} for assignment {}, with reason "{}"'.format(
            amount, assignment_id, reason
        )
    )
Print out bonus info for the assignment
def _get_belief_package(stmt):
    """Return the belief packages of a given statement recursively.

    Packages from all supporting statements are gathered first
    (deduplicated by statement key), then this statement's own package
    is appended last.
    """
    belief_packages = []
    for st in stmt.supports:
        parent_packages = _get_belief_package(st)
        package_stmt_keys = [pkg.statement_key for pkg in belief_packages]
        for package in parent_packages:
            if package.statement_key not in package_stmt_keys:
                belief_packages.append(package)
    belief_package = BeliefPackage(stmt.matches_key(), stmt.evidence)
    belief_packages.append(belief_package)
    return belief_packages
Return the belief packages of a given statement recursively.
def _wrap_result(self, response):
    """Wraps child's response in a HandlerResult to be sent back to client.

    Args:
        response (enum or dict): Either an integer status enum, or a dict
            of attributes to be added to the protobuf response.
    """
    if isinstance(response, int):
        # Bare status enums are expanded into a full response dict first.
        response = self._wrap_response(response)
    return HandlerResult(
        status=HandlerStatus.RETURN,
        message_out=self._response_proto(**response),
        message_type=self._response_type)
Wraps child's response in a HandlerResult to be sent back to client. Args: response (enum or dict): Either an integer status enum, or a dict of attributes to be added to the protobuf response.
def period(self, unit, size = 1):
    """Create a new period of *size* units starting at this instant.

    >>> instant(2014).period('month')
    Period(('month', Instant((2014, 1, 1)), 1))
    >>> instant('2014-2').period('year', 2)
    Period(('year', Instant((2014, 2, 1)), 2))

    NOTE(review): asserts are stripped under -O; callers relying on
    AssertionError for validation should be aware.
    """
    assert unit in (DAY, MONTH, YEAR), 'Invalid unit: {} of type {}'.format(unit, type(unit))
    assert isinstance(size, int) and size >= 1, 'Invalid size: {} of type {}'.format(size, type(size))
    return Period((unit, self, size))
Create a new period starting at instant. >>> instant(2014).period('month') Period(('month', Instant((2014, 1, 1)), 1)) >>> instant('2014-2').period('year', 2) Period(('year', Instant((2014, 2, 1)), 2)) >>> instant('2014-2-3').period('day', size = 2) Period(('day', Instant((2014, 2, 3)), 2))
def isnumber(*args):
    """Check whether every value is an int or float.

    NOTE: treats booleans as numbers, where True=1 and False=0.
    """
    return all(isinstance(value, (int, float)) for value in args)
Checks if value is an integer, long integer or float. NOTE: Treats booleans as numbers, where True=1 and False=0.
def rgb2gray(img):
    """Converts an RGB image to grayscale using matlab's algorithm."""
    # First row of the inverted NTSC YIQ->RGB matrix gives the luma weights.
    T = np.linalg.inv(np.array([
        [1.0, 0.956, 0.621],
        [1.0, -0.272, -0.647],
        [1.0, -1.106, 1.703],
    ]))
    r_c, g_c, b_c = T[0]
    # Split channels along the last axis after converting to float.
    r, g, b = np.rollaxis(as_float_image(img), axis=-1)
    return r_c * r + g_c * g + b_c * b
Converts an RGB image to grayscale using matlab's algorithm.
def strip(self, inplace=False):
    """Set all edge lengths to None, on a copy unless *inplace* is True;
    returns the modified tree."""
    target = self if inplace else self.copy()
    for edge in target._tree.preorder_edge_iter():
        edge.length = None
    target._dirty = True
    return target
Sets all edge lengths to None
def persistent_id(self, obj):
    """Emit a persistent ID for Element instances instead of pickling them
    as regular class instances; other objects pickle normally (None)."""
    if isinstance(obj, Element):
        return obj.__class__.__name__, obj.symbol
    return None
Instead of pickling as a regular class instance, we emit a persistent ID.
def startup(self):
    """Start the instance; mainly used to start the proxy when enabled."""
    self.runner.info_log("Startup")
    if self.browser_config.config.get('enable_proxy'):
        self.start_proxy()
Start the instance This is mainly use to start the proxy
def get_mailbox(self, email=None, email_hash=None):
    """Return list of emails in the given email address, or a dict with an
    `error` key if the mailbox is empty.

    :param email: (optional) email address; defaults to this instance's address.
    :param email_hash: (optional) md5 hash from the email address.
    """
    if email is None:
        email = self.get_email_address()
    if email_hash is None:
        email_hash = self.get_hash(email)
    url = 'http://{0}/request/mail/id/{1}/format/json/'.format(
        self.api_domain, email_hash)
    req = requests.get(url)
    return req.json()
Return list of emails in given email address or dict with `error` key if mail box is empty. :param email: (optional) email address. :param email_hash: (optional) md5 hash from email address.
def retrieve(self, block_height, headers=None):
    """Retrieves the block with the given ``block_height``.

    Args:
        block_height (str): height of the block to retrieve.
        headers (dict): Optional headers to pass to the request.

    Returns:
        dict: The block with the given ``block_height``.
    """
    path = self.path + block_height
    # Bug fix: the caller-supplied ``headers`` were previously dropped
    # (the call hardcoded ``headers=None``); forward them instead.
    return self.transport.forward_request(
        method='GET', path=path, headers=headers)
Retrieves the block with the given ``block_height``. Args: block_height (str): height of the block to retrieve. headers (dict): Optional headers to pass to the request. Returns: dict: The block with the given ``block_height``.
def select(self, index=0):
    """Select the DB with the specified zero-based numeric index.

    New connections always use DB ``0``.

    :param int index: The database to select
    :rtype: future resolving to ``True`` when the server replies ``OK``
    :raises: :exc:`~tredis.exceptions.InvalidClusterCommand` when
        clustering is enabled (SELECT is not a cluster command)
    """
    if self._clustering:
        raise exceptions.InvalidClusterCommand
    # ascii() yields the decimal text of the index; Redis args are bytes.
    future = self._execute(
        [b'SELECT', ascii(index).encode('ascii')], b'OK')

    def on_selected(f):
        # Remember the selected DB on the connection.
        # NOTE(review): presumably used to restore the DB on reconnect —
        # confirm against the connection implementation.
        self._connection.database = index

    self.io_loop.add_future(future, on_selected)
    return future
Select the DB with having the specified zero-based numeric index. New connections always use DB ``0``. :param int index: The database to select :rtype: bool :raises: :exc:`~tredis.exceptions.RedisError` :raises: :exc:`~tredis.exceptions.InvalidClusterCommand`
def tcp_accept(self):
    """Block until a TCP client connects, then send it the prompt "> "."""
    self.conn, self.addr = self.tcp_socket.accept()
    banner = "[MESSAGE] The connection is established at: "
    print(banner, self.addr)
    self.tcp_send("> ")
Waiting for a TCP connection.
def extend_memory(self, start_position: int, size: int) -> None:
    """Extend memory to cover at least ``start_position + size`` bytes,
    charging gas for the expansion.

    Raises whatever ``consume_gas`` raises (out of gas) when the
    expansion cannot be paid for.
    """
    validate_uint256(start_position, title="Memory start position")
    validate_uint256(size, title="Memory size")

    # Memory is priced per 32-byte word; compare rounded-up sizes.
    before_size = ceil32(len(self._memory))
    after_size = ceil32(start_position + size)

    before_cost = memory_gas_cost(before_size)
    after_cost = memory_gas_cost(after_size)

    if self.logger.show_debug2:
        self.logger.debug2(
            "MEMORY: size (%s -> %s) | cost (%s -> %s)",
            before_size,
            after_size,
            before_cost,
            after_cost,
        )

    # A zero-size extension is a no-op and must not charge gas.
    if size:
        # Only charge when the word-aligned size actually grew.
        if before_cost < after_cost:
            gas_fee = after_cost - before_cost
            self._gas_meter.consume_gas(
                gas_fee,
                reason=" ".join((
                    "Expanding memory",
                    str(before_size),
                    "->",
                    str(after_size),
                ))
            )

        self._memory.extend(start_position, size)
Extend the size of the memory to be at minimum ``start_position + size`` bytes in length. Raise `eth.exceptions.OutOfGas` if there is not enough gas to pay for extending the memory.
def _store_in_native_memory(self, data, data_type, addr=None):
    """Store a value or list of values in native memory.

    :param data:      single int value, or a list of ints (treated as an
                      array laid out contiguously).
    :param data_type: Java type of the element(s); determines element size.
    :param addr:      native store address; allocated when None.
    :return:          native address of the stored data.
    """
    # Concrete writes only: symbolic target addresses are unsupported.
    if addr is not None and self.state.solver.symbolic(addr):
        raise NotImplementedError('Symbolic addresses are not supported.')

    type_size = ArchSoot.sizeof[data_type]  # element size in bits
    native_memory_endness = self.state.arch.memory_endness

    if isinstance(data, int):
        if addr is None:
            addr = self._allocate_native_memory(size=type_size//8)
        value = self.state.solver.BVV(data, type_size)
        self.state.memory.store(addr, value, endness=native_memory_endness)

    elif isinstance(data, list):
        if addr is None:
            addr = self._allocate_native_memory(size=type_size*len(data)//8)
        # Lay the elements out contiguously, one element-size apart.
        for idx, value in enumerate(data):
            memory_addr = addr+idx*type_size//8
            self.state.memory.store(memory_addr, value, endness=native_memory_endness)

    return addr
Store in native memory. :param data: Either a single value or a list. Lists get interpreted as an array. :param data_type: Java type of the element(s). :param addr: Native store address. If not set, native memory is allocated. :return: Native addr of the stored data.
def _currentLineExtraSelections(self):
    """Build QTextEdit.ExtraSelection objects that highlight the current
    line(s) with ``self._currentLineColor``.

    Returns an empty list when highlighting is disabled (color is None).
    With an active rectangular selection every cursor of that selection
    gets its own highlighted line; otherwise only the text cursor's line.
    """
    if self._currentLineColor is None:
        return []

    def makeSelection(cursor):
        selection = QTextEdit.ExtraSelection()
        selection.format.setBackground(self._currentLineColor)
        # Highlight the full viewport width, not just the text extent.
        selection.format.setProperty(QTextFormat.FullWidthSelection, True)
        # Collapse the cursor so only its line is marked.
        cursor.clearSelection()
        selection.cursor = cursor
        return selection

    rectangularSelectionCursors = self._rectangularSelection.cursors()
    if rectangularSelectionCursors:
        return [makeSelection(cursor) \
                for cursor in rectangularSelectionCursors]
    else:
        return [makeSelection(self.textCursor())]
QTextEdit.ExtraSelection, which highlightes current line
def to_json(self, filename=None, encoding="utf-8", errors="strict", multiline=False, **json_kwargs):
    """Transform the BoxList object into a JSON string.

    :param filename: If provided will save to file
    :param encoding: File encoding
    :param errors: How to handle encoding errors
    :param multiline: Put each item in the list onto its own line
    :param json_kwargs: additional arguments to pass to json.dump(s)
    :return: string of JSON or return of `json.dump`
    """
    if filename and multiline:
        # One JSON document per item, newline-separated (JSON-lines style).
        lines = [_to_json(item, filename=False, encoding=encoding, errors=errors, **json_kwargs) for item in self]
        with open(filename, 'w', encoding=encoding, errors=errors) as f:
            # Python 2 branch decodes before writing — presumably _to_json
            # returns bytes there; verify against _to_json.
            f.write("\n".join(lines).decode('utf-8') if sys.version_info < (3, 0) else "\n".join(lines))
    else:
        return _to_json(self.to_list(), filename=filename, encoding=encoding, errors=errors, **json_kwargs)
Transform the BoxList object into a JSON string. :param filename: If provided will save to file :param encoding: File encoding :param errors: How to handle encoding errors :param multiline: Put each item in the list onto its own line :param json_kwargs: additional arguments to pass to json.dump(s) :return: string of JSON or return of `json.dump`
def _generate_class_comment(self, data_type):
    """Emit a generic class comment for a union or struct data type.

    :raises TypeError: if data_type is neither a struct nor a union.
    """
    if is_struct_type(data_type):
        class_type = 'struct'
    elif is_union_type(data_type):
        class_type = 'union'
    else:
        raise TypeError('Can\'t handle type %r' % type(data_type))

    self.emit(comment_prefix)
    self.emit_wrapped_text(
        'The `{}` {}.'.format(fmt_class(data_type.name), class_type),
        prefix=comment_prefix)

    # Include the spec-provided documentation, if any.
    if data_type.doc:
        self.emit(comment_prefix)
        self.emit_wrapped_text(
            self.process_doc(data_type.doc, self._docf),
            prefix=comment_prefix)

    self.emit(comment_prefix)
    protocol_str = (
        'This class implements the `DBSerializable` protocol '
        '(serialize and deserialize instance methods), which is required '
        'for all Obj-C SDK API route objects.')
    # NOTE(review): protocol_str contains no format placeholders, so the
    # .format(...) arguments below are a no-op — likely leftover; confirm.
    self.emit_wrapped_text(
        protocol_str.format(fmt_class_prefix(data_type), class_type),
        prefix=comment_prefix)
    self.emit(comment_prefix)
Emits a generic class comment for a union or struct.
def pca_to_mapping(pca, **extra_props):
    """Return a mapping of a PCA result set suitable for reconstructing
    a planar error surface in other software packages.

    kwargs: method (defaults to sampling_axes)
    """
    from .axes import sampling_axes
    covariance_fn = extra_props.pop('method', sampling_axes)
    return dict(
        axes=pca.axes.tolist(),
        covariance=covariance_fn(pca).tolist(),
        **extra_props)
A helper to return a mapping of a PCA result set suitable for reconstructing a planar error surface in other software packages kwargs: method (defaults to sampling axes)
def load(self, filename, bs=512):
    """Load a GPT partition table from a file or block device.

    Args:
        filename (str): path to file or device to open for reading
        bs (int): block size of the volume, default: 512

    Raises:
        Exception: if the GPT signature is invalid
        IOError: if the file does not exist or is not readable
    """
    with open(filename, 'rb') as f:
        # The header-size field lives at offset 0x0C within the GPT header.
        f.seek(GPT_HEADER_OFFSET + 0x0C)
        header_size = struct.unpack("<I", f.read(4))[0]
        # Re-read the whole header now that its size is known.
        f.seek(GPT_HEADER_OFFSET)
        header_data = f.read(header_size)
        self.header = GPT_HEADER(header_data)
        if (self.header.signature != GPT_SIGNATURE):
            raise Exception("Invalid GPT signature")
        self.__load_partition_entries(f, bs)
Loads GPT partition table. Args: filename (str): path to file or device to open for reading bs (uint): Block size of the volume, default: 512 Raises: IOError: If file does not exist or not readable
def single_traj_from_n_files(file_list, top):
    """Create a single trajectory object by loading and joining a list of
    files.

    :param file_list: trajectory file paths, joined in order
    :param top: topology passed to md.load
    :return: the combined trajectory, or None for an empty file_list
    """
    combined = None
    for fname in file_list:
        loaded = md.load(fname, top=top)
        combined = loaded if combined is None else combined.join(loaded)
    return combined
Creates a single trajectory object from a list of files
def make_agent_from_hparams(
    agent_type, base_env, stacked_env, loop_hparams, policy_hparams,
    planner_hparams, model_dir, policy_dir, sampling_temp, video_writers=()
):
    """Creates an Agent from hparams."""
    def sim_env_kwargs_fn():
        # Deferred so simulated-env kwargs are only built when needed.
        return rl.make_simulated_env_kwargs(
            base_env, loop_hparams, batch_size=planner_hparams.batch_size,
            model_dir=model_dir
        )

    planner_kwargs = planner_hparams.values()
    # These three are passed explicitly below; remove them from the
    # pass-through kwargs to avoid duplicate keyword arguments.
    planner_kwargs.pop("batch_size")
    planner_kwargs.pop("rollout_agent_type")
    planner_kwargs.pop("env_type")
    return make_agent(
        agent_type, stacked_env, policy_hparams, policy_dir, sampling_temp,
        sim_env_kwargs_fn, loop_hparams.frame_stack_size,
        planner_hparams.rollout_agent_type,
        inner_batch_size=planner_hparams.batch_size,
        env_type=planner_hparams.env_type,
        video_writers=video_writers,
        **planner_kwargs
    )
Creates an Agent from hparams.
def from_url(cls, url: URL, *, encoding: str = 'latin1') -> Optional['BasicAuth']:
    """Build a BasicAuth instance from the credentials embedded in *url*.

    Returns None when the URL carries no user component.

    :raises TypeError: if *url* is not a yarl.URL.
    """
    if not isinstance(url, URL):
        raise TypeError("url should be yarl.URL instance")
    user = url.user
    if user is None:
        return None
    password = url.password or ''
    return cls(user, password, encoding=encoding)
Create BasicAuth from url.
def fixpath(path):
    """Uniformly format a path: expand '~', resolve symlinks, normalize."""
    expanded = os.path.expanduser(path)
    resolved = os.path.realpath(expanded)
    return os.path.normpath(resolved)
Uniformly format a path.
def all(self):
    """Return all results as a list by automatically fetching all pages.

    :return: All results.
    :rtype: ``list``
    """
    # Copy the first page: the original aliased the list stored in
    # ``self.data``, so extending the accumulator silently mutated the
    # cached page data.
    results = list(self.data[self.data_type])
    while self.current < self.total:
        self.fetch_next_page()
        results.extend(self.data[self.data_type])
    return results
Return all results as a list by automatically fetching all pages. :return: All results. :rtype: ``list``
def get_object(self, identifier, mask=None):
    """Get a Reserved Capacity Group.

    :param int identifier: Id of the SoftLayer_Virtual_ReservedCapacityGroup
    :param string mask: override default object Mask
    """
    if mask is None:
        mask = "mask[instances[billingItem[item[keyName],category], guest], backendRouter[datacenter]]"
    return self.client.call(self.rcg_service, 'getObject',
                            id=identifier, mask=mask)
Get a Reserved Capacity Group :param int identifier: Id of the SoftLayer_Virtual_ReservedCapacityGroup :param string mask: override default object Mask
def location(self):
    """The location for this engine.

    May be None if no specific location has been assigned.

    :return: Location element or None
    """
    location = Element.from_href(self.location_ref)
    # The 'Default' placeholder location counts as "no location".
    if location and location.name == 'Default':
        return None
    return location
The location for this engine. May be None if no specific location has been assigned. :param value: location to assign engine. Can be name, str href, or Location element. If name, it will be automatically created if a Location with the same name doesn't exist. :raises UpdateElementFailed: failure to update element :return: Location element or None
def list2dict(list_of_options):
    """Transform a list of 2-element tuples into a dictionary.

    Later tuples win when keys repeat, matching plain assignment order.
    """
    return dict(list_of_options)
Transforms a list of 2 element tuples to a dictionary
def getAvailableClassesInPackage(package):
    """Return all classes reachable from *package* (directly or via its
    member modules) whose names do not begin with '_' and whose defining
    module belongs to the package.
    """
    classes = [member for _, member in inspect.getmembers(package, inspect.isclass)]
    for _, module in inspect.getmembers(package, inspect.ismodule):
        classes.extend(member for _, member in inspect.getmembers(module, inspect.isclass))
    # Drop "private" class names.
    classes = [cls for cls in classes if not cls.__name__.startswith("_")]
    # Keep only classes defined inside this package.
    return [cls for cls in classes if cls.__module__.startswith(package.__name__)]
return a list of all classes in the given package whose modules dont begin with '_'
def _call_pip(self, name=None, prefix=None, extra_args=None, callback=None):
    """Call pip in a QProcess worker and queue it for execution.

    :param name: environment name forwarded to the pip command builder
    :param prefix: environment prefix forwarded to the pip command builder
    :param extra_args: optional extra command-line arguments for pip
    :param callback: callable invoked by the worker when pip finishes
    :return: the queued ProcessWorker
    """
    cmd_list = self._pip_cmd(name=name, prefix=prefix)
    # Guard the default: the original unconditionally called
    # cmd_list.extend(extra_args), raising TypeError when extra_args
    # was left as None.
    if extra_args:
        cmd_list.extend(extra_args)
    process_worker = ProcessWorker(cmd_list, pip=True, callback=callback)
    process_worker.sig_finished.connect(self._start)
    self._queue.append(process_worker)
    self._start()
    return process_worker
Call pip in QProcess worker.
def find_in_bids(filename, pattern=None, generator=False, upwards=False, wildcard=True, **kwargs):
    """Find the nearest file matching some criteria.

    Parameters
    ----------
    filename : instance of Path
        search the root for this file
    pattern : str
        glob string for the filename of interest (remember to include
        '*'); passed directly to rglob, built from kwargs when not given
    generator : bool
        return a generator over matches instead of a single Path
    upwards : bool
        whether to keep searching in parent directories on no match
    wildcard : bool
        use wildcards for unspecified fields when building the pattern
    kwargs : dict
        field/value pairs used to build the pattern

    Returns
    -------
    Path
        filename matching the pattern

    Raises
    ------
    ValueError
        when both upwards and generator are requested
    FileNotFoundError
        when zero (after exhausting parents) or multiple files match
    """
    if upwards and generator:
        raise ValueError('You cannot search upwards and have a generator')

    if pattern is None:
        pattern = _generate_pattern(wildcard, kwargs)
    lg.debug(f'Searching {pattern} in (unknown)')

    # Reached the dataset root: stop recursing upwards.
    if upwards and filename == find_root(filename):
        raise FileNotFoundError(f'Could not find file matchting {pattern} in (unknown)')

    if generator:
        return filename.rglob(pattern)

    matches = list(filename.rglob(pattern))
    if len(matches) == 1:
        return matches[0]
    elif len(matches) == 0:
        if upwards:
            # Retry one directory up.
            return find_in_bids(filename.parent, pattern=pattern, upwards=upwards)
        else:
            raise FileNotFoundError(f'Could not find file matching {pattern} in (unknown)')
    else:
        matches_str = '"\n\t"'.join(str(x) for x in matches)
        raise FileNotFoundError(f'Multiple files matching "{pattern}":\n\t"{matches_str}"')
Find the nearest file matching some criteria. Parameters ---------- filename : instance of Path search the root for this file pattern : str glob string for search criteria of the filename of interest (remember to include '*'). The pattern is passed directly to rglob. wildcard : bool use wildcards for unspecified fields or not (if True, add "_*_" between fields) upwards : bool whether to keep searching upwards in parent directories kwargs : dict Returns ------- Path filename matching the pattern
def get_lux_count(lux_byte):
    """Convert a raw byte from the TSL2550D lux sensor into an ADC count.

    The byte packs a valid flag (bit 7), a 3-bit chord number (bits 4-6)
    and a 4-bit step number (bits 0-3).

    :raises SensorError: if the valid bit is not set.
    """
    LUX_VALID_MASK = 0b10000000
    LUX_CHORD_MASK = 0b01110000
    LUX_STEP_MASK = 0b00001111
    if not (lux_byte & LUX_VALID_MASK):
        raise SensorError("Invalid lux sensor data.")
    step_num = lux_byte & LUX_STEP_MASK
    chord_num = (lux_byte & LUX_CHORD_MASK) >> 4
    step_val = 2 ** chord_num
    chord_val = int(16.5 * (step_val - 1))
    return chord_val + step_val * step_num
Method to convert data from the TSL2550D lux sensor into more easily usable ADC count values.
def get_event_stream(self):
    """Return the event stream associated with this WVA, creating it on
    first use.

    The stream is cached on the instance and shared by all users of this
    WVA object.

    :return: a :class:`WVAEventStream` instance
    """
    stream = self._event_stream
    if stream is None:
        stream = WVAEventStream(self._http_client)
        self._event_stream = stream
    return stream
Get the event stream associated with this WVA Note that this event stream is shared across all users of this WVA device as the WVA only supports a single event stream. :return: a new :class:`WVAEventStream` instance
def normalize_rust_function(self, function, line):
    """Normalize a single Rust frame with a function.

    :param function: raw function string from the frame
    :param line: source line number, appended when the signature matches
        ``signatures_with_line_numbers_re``
    :return: the cleaned-up function string
    """
    # Drop the prefix and return type from the symbol.
    function = drop_prefix_and_return_type(function)

    # Collapse generic parameters into <T>, keeping trait casts (' as ').
    function = collapse(
        function,
        open_string='<',
        close_string='>',
        replacement='<T>',
        exceptions=(' as ',)
    )
    if self.collapse_arguments:
        # Remove the argument list entirely.
        function = collapse(
            function,
            open_string='(',
            close_string=')',
            replacement=''
        )

    if self.signatures_with_line_numbers_re.match(function):
        function = '{}:{}'.format(function, line)

    # Whitespace / punctuation / hash-suffix cleanup via the instance's
    # precompiled regexes.
    function = self.fixup_space.sub('', function)
    function = self.fixup_comma.sub(', ', function)
    function = self.fixup_hash.sub('', function)

    return function
Normalizes a single rust frame with a function
def _domain_event_pmwakeup_cb(conn, domain, reason, opaque):
    """Domain wakeup events handler.

    Forwards the PM-wakeup event to the Salt event bus. The ``reason``
    argument from libvirt is not used; the payload always reports
    'unknown'.
    """
    _salt_send_domain_event(opaque, conn, domain, opaque['event'], {
        'reason': 'unknown'
    })
Domain wakeup events handler
def to_bytes_safe(text, encoding="utf-8"):
    """Convert the input value into bytes type.

    If the input value is string type and can be encoded with *encoding*,
    the encoded value is returned. If the encoding fails, the original
    value is returned unchanged (per the documented "safe" contract — the
    original code let the UnicodeEncodeError propagate instead).

    :param text: the input value, which may be string or bytes.
    :param encoding: the encoding used while converting string input
        into bytes.
    :raises TypeError: if *text* is neither string nor bytes.
    :rtype: :class:`~__builtin__.bytes`
    """
    if not isinstance(text, (bytes, text_type)):
        raise TypeError("must be string type")
    if isinstance(text, text_type):
        try:
            return text.encode(encoding)
        except UnicodeEncodeError:
            # Fall back to the original value when encoding fails.
            return text
    return text
Convert the input value into bytes type. If the input value is string type and can be encoded as UTF-8 bytes, the encoded value will be returned. Otherwise, if the encoding has failed, the original value will be returned unchanged. :param text: the input value, which may be string or bytes. :param encoding: the expected encoding to be used while converting the string input into bytes. :rtype: :class:`~__builtin__.bytes`
def find_products(self, product_type):
    """Search for products of a given type declared by this component.

    List-type products (those in LIST_PRODUCTS) are returned wholesale
    when their type passes the active filter; other products are
    filtered entry by entry against desired_prods. Products whose type
    appears in PATH_PRODUCTS are post-processed into file paths.

    Args:
        product_type (str): The type of product that we wish to return.

    Returns:
        list of str: products of the given type (empty when none match
        or the filter excludes them).
    """
    # A list-type product excluded by the active filter yields nothing.
    if self.filter_prods and product_type in self.LIST_PRODUCTS and product_type not in self.desired_prods:
        return []

    if product_type in self.LIST_PRODUCTS:
        found_products = self.products.get(product_type, [])
    else:
        # self.products maps product-name -> product-type here; filter
        # each entry individually against desired_prods.
        found_products = [x[0] for x in self.products.items()
                          if x[1] == product_type and
                          (not self.filter_prods or x[0] in self.desired_prods)]

    found_products = [self._ensure_product_string(x) for x in found_products]

    declaration = self.PATH_PRODUCTS.get(product_type)
    if declaration is not None:
        # Path-based products get resolved via the path declaration.
        found_products = [self._process_product_path(x, declaration) for x in found_products]

    return found_products
Search for products of a given type. Search through the products declared by this IOTile component and return only those matching the given type. If the product is described by the path to a file, a complete normalized path will be returned. The path could be different depending on whether this IOTile component is in development or release mode. The behavior of this function when filter_products has been called is slightly different based on whether product_type is in LIST_PRODUCTS or not. If product type is in LIST_PRODUCTS, then all matching products are returned if product_type itself was passed. So to get all tilebus_definitions you would call ``filter_products('tilebus_definitions')`` By contrast, other products are filtered product-by-product. So there is no way to filter and get **all libraries**. Instead you pass the specific product names of the libraries that you want to ``filter_products`` and those specific libraries are returned. Passing the literal string ``library`` to ``filter_products`` will not return only the libraries, it will return nothing since no library is named ``library``. Args: product_type (str): The type of product that we wish to return. Returns: list of str: The list of all products of the given type. If no such products are found, an empty list will be returned. If filter_products() has been called and the filter does not include this product type, an empty list will be returned.
def read(self):
    """Read and interpret data from the daemon.

    Returns the non-positive status from the underlying read on failure,
    otherwise 0 after unpacking either a new-style (JSON) or old-style
    ('GPSD'-prefixed) response.
    """
    status = gpscommon.read(self)
    if status <= 0:
        return status
    if self.response.startswith("{") and self.response.endswith("}\r\n"):
        # New-style (JSON) protocol response.
        self.unpack(self.response)
        self.__oldstyle_shim()
        self.newstyle = True
        self.valid |= PACKET_SET
    elif self.response.startswith("GPSD"):
        # Old-style protocol response.
        self.__oldstyle_unpack(self.response)
        self.valid |= PACKET_SET
    return 0
Read and interpret data from the daemon.
def initRnaQuantificationSet(self):
    """Initialize an empty RNA quantification set by creating the backing
    SQLite tables at ``self._args.filePath``."""
    store = rnaseq2ga.RnaSqliteStore(self._args.filePath)
    store.createTables()
Initialize an empty RNA quantification set
def back_off_until(self):
    """Return the back off value as a datetime, or None.

    Resets (and returns None for) the current back off value if it has
    expired.
    """
    # Fast path: avoid taking the lock when no deadline is set.
    if self._back_off_until is None:
        return None
    with self._back_off_lock:
        # Re-check under the lock (double-checked locking): another
        # thread may have cleared the deadline before we acquired it.
        if self._back_off_until is None:
            return None
        if self._back_off_until < datetime.datetime.now():
            # Deadline has passed: clear it.
            self._back_off_until = None
            return None
        return self._back_off_until
Returns the back off value as a datetime. Resets the current back off value if it has expired.
def create_attribute(self, column=None, listType=None, namespace=None,
                     network=None, atype=None, verbose=False):
    """Create a new edge column.

    :param column (string, optional): Unique name of column
    :param listType (string, optional): Can be one of integer, long,
        double, or string.
    :param namespace (string, optional): Node, Edge, and Network objects
        support the default, local, and hidden namespaces. Root networks
        also support the shared namespace. Custom namespaces may be
        specified by Apps.
    :param network (string, optional): Specifies a network by name, or by
        SUID if the prefix SUID: is used. The keyword CURRENT, or a blank
        value can also be used to specify the current network.
    :param atype (string, optional): Can be one of integer, long, double,
        string, or list.
    :param verbose: print more
    """
    network = check_network(self, network, verbose=verbose)
    # "type" is the REST parameter name; "atype" avoids shadowing builtins.
    PARAMS = set_param(["column", "listType", "namespace", "network", "type"],
                       [column, listType, namespace, network, atype])
    response = api(url=self.__url + "/create attribute", PARAMS=PARAMS,
                   method="POST", verbose=verbose)
    return response
Creates a new edge column. :param column (string, optional): Unique name of column :param listType (string, optional): Can be one of integer, long, double, or string. :param namespace (string, optional): Node, Edge, and Network objects support the default, local, and hidden namespaces. Root networks also support the shared namespace. Custom namespaces may be specified by Apps. :param network (string, optional): Specifies a network by name, or by SUID if the prefix SUID: is used. The keyword CURRENT, or a blank value can also be used to specify the current network. :param atype (string, optional): Can be one of integer, long, double, string, or list. :param verbose: print more
def open(filename, frame='unspecified'):
    """Create a NormalCloudImage from a file.

    NOTE: shadows the builtin ``open()`` within this module's namespace.

    Parameters
    ----------
    filename : :obj:`str`
        The file to load the data from.
    frame : :obj:`str`
        A string representing the frame of reference in which the new
        image lies.

    Returns
    -------
    :obj:`NormalCloudImage`
        The new NormalCloudImage.
    """
    data = Image.load_data(filename)
    return NormalCloudImage(data, frame)
Creates a NormalCloudImage from a file. Parameters ---------- filename : :obj:`str` The file to load the data from. Must be one of .png, .jpg, .npy, or .npz. frame : :obj:`str` A string representing the frame of reference in which the new image lies. Returns ------- :obj:`NormalCloudImage` The new NormalCloudImage.