code
stringlengths
20
4.93k
docstring
stringlengths
33
1.27k
source
stringclasses
3 values
def text(self, value): self._text = value self.timestamps.edited = datetime.datetime.utcnow() self.touch(True)
Set the text value. Args: value (str): Text value.
codesearchnet
def update_parser(self, parser): self._parser = parser ini_str = argparse_to_ini(parser) configp = configparser.ConfigParser(allow_no_value=True) configp.read_dict(self._config) configp.read_string(ini_str) self._config.update( {s: dict(configp.items...
Update config dictionary with declared arguments in an argparse.parser New variables will be created, and existing ones overridden. Args: parser (argparse.ArgumentParser): parser to read variables from
juraj-google-style
def get_samples_live(self, sensor_id, last=None): url = 'https: headers = self.__gen_headers() headers['Content-Type'] = 'application/json' params = {'sensorId': sensor_id} if last: params['last'] = last url = self.__append_url_params(url, params) r = requests.get(url, headers=header...
Get recent samples, one sample per second for up to the last 2 minutes. Args: sensor_id (string): hexadecimal id of the sensor to query, e.g. ``0x0013A20040B65FAD`` last (string): starting range, as ISO8601 timestamp Returns: list: dictionary objects containing sample data
codesearchnet
def import_global(name, modules=None, exceptions=DummyException, locals_=None, globals_=None, level=(- 1)): frame = None try: if ((locals_ is None) or (globals_ is None)): import inspect frame = inspect.stack()[1][0] if (locals_ is None): locals_ = fra...
Import the requested items into the global scope WARNING! this method _will_ overwrite your global scope If you have a variable named "path" and you call import_global('sys') it will be overwritten with sys.path Args: name (str): the name of the module to import, e.g. sys modules (str): the modules to import, use Non...
codesearchnet
def get_content(url, headers={}, decoded=True): logging.debug(('get_content: %s' % url)) req = request.Request(url, headers=headers) if cookies: cookies.add_cookie_header(req) req.headers.update(req.unredirected_hdrs) response = urlopen_with_retry(req) data = response.read() cont...
Gets the content of a URL via sending a HTTP GET request. Args: url: A URL. headers: Request headers used by the client. decoded: Whether decode the response body using UTF-8 or the charset specified in Content-Type. Returns: The content as a string.
codesearchnet
def get_remote_info(url_id): try: data = _send_request(url_id) except Exception as e: sys.stderr.write("Seeder GET error: ") sys.stderr.write(str(e.message)) return None return _convert_to_wakat_format(data)
Download data and convert them to dict used in frontend. Args: url_id (str): ID used as identification in Seeder. Returns: dict: Dict with data for frontend or None in case of error.
juraj-google-style
def write_compounds(self, stream, compounds, properties=None): self._write_entries(stream, compounds, self.convert_compound_entry, properties)
Write iterable of compounds as YAML object to stream. Args: stream: File-like object. compounds: Iterable of compound entries. properties: Set of compound properties to output (or None to output all).
codesearchnet
def remove_species(self, species): new_sites = [] species = [get_el_sp(sp) for sp in species] for site in self._sites: new_sp_occu = {sp: amt for (sp, amt) in site.species.items() if (sp not in species)} if (len(new_sp_occu) > 0): new_sites.append(Site(new_sp_occu, site.coords, p...
Remove all occurrences of a species from a molecule. Args: species: Species to remove.
codesearchnet
def make_new(self, rev): return self.vcs.make_rev_options(rev, extra_args=self.extra_args)
Make a copy of the current instance, but with a new rev. Args: rev: the name of the revision for the new object.
juraj-google-style
def _assign_stablehlo_quantization_config_or_populate_default(self, args): if self.experimental_stablehlo_quantizer_config is not None and Optimize.DEFAULT not in self.optimizations: args['quantization_config'] = self.experimental_stablehlo_quantizer_config elif Optimize.DEFAULT in self.optimizations an...
Assigns `QuantizationConfig` to `args` or populate default. Args: args: Dictionary of argument names and associated values.
github-repos
def from_control_flow_context_def(context_def, import_scope=None): if context_def.HasField('cond_ctxt'): return CondContext.from_proto(context_def.cond_ctxt, import_scope=import_scope) if context_def.HasField('while_ctxt'): return WhileContext.from_proto(context_def.while_ctxt, import_scope=impo...
Deserializes `context_def` into the appropriate ControlFlowContext. Args: context_def: ControlFlowContextDef proto import_scope: Optional `string`. Name scope to add. Returns: A ControlFlowContext subclass
github-repos
def CreateKey(self, private_key=None): if private_key is None: private_key = bytes(Random.get_random_bytes(32)) key = KeyPair(priv_key=private_key) self._keys[key.PublicKeyHash.ToBytes()] = key return key
Create a KeyPair Args: private_key (iterable_of_ints): (optional) 32 byte private key Returns: KeyPair: a KeyPair instance
juraj-google-style
def read(self): if self._cache: img = self._cache.get(self._position) if (img is not None): ret = True else: if (self._position != self._get_real_position()): self._set_real_position(self._position) (ret, img) = self._vcap.read() ...
Read the next frame. If the next frame have been decoded before and in the cache, then return it directly, otherwise decode, cache and return it. Returns: ndarray or None: Return the frame if successful, otherwise None.
codesearchnet
def send(self, message, socket_): if not socket_: raise TensorForceError("No socket given in call to `send`!") elif not isinstance(message, dict): raise TensorForceError("Message to be sent must be a dict!") message = msgpack.packb(message) len_ = len(mes...
Sends a message (dict) to the socket. Message consists of a 8-byte len header followed by a msgpack-numpy encoded dict. Args: message: The message dict (e.g. {"cmd": "reset"}) socket_: The python socket object to use.
juraj-google-style
def Add(self, file_desc_proto): proto_name = file_desc_proto.name if proto_name not in self._file_desc_protos_by_file: self._file_desc_protos_by_file[proto_name] = file_desc_proto elif self._file_desc_protos_by_file[proto_name] != file_desc_proto: raise DescriptorDatabaseConflictingDefiniti...
Adds the FileDescriptorProto and its types to this database. Args: file_desc_proto: The FileDescriptorProto to add. Raises: DescriptorDatabaseConflictingDefinitionError: if an attempt is made to add a proto with the same name but different definition than an exisiting proto in the database.
juraj-google-style
def _generate_pickle_name(gt): grammar_textfile_name = os.path.basename(gt) head, tail = os.path.splitext(grammar_textfile_name) if tail == '.txt': tail = '' cache_dir = user_cache_dir(appname='YAPF', appauthor='Google', version=yapf_version) return cache_dir + os.sep + head + tail + '-py' +...
Get the filepath to write a pickle file to given the path of a grammar textfile. The returned filepath should be in a user-specific cache directory. Args: gt (str): path to grammar text file Returns: str: path to pickle file
github-repos
def to_json(self): return {'name': self.name, 'segments': [segment.to_json() for segment in self.segments], 'meta': self.meta}
Converts track to a JSON serializable format Returns: Map with the name, and segments of the track.
codesearchnet
def traverse_pagination(response, endpoint, content_filter_query, query_params): results = response.get('results', []) page = 1 while response.get('next'): page += 1 response = endpoint().post(content_filter_query, **dict(query_params, page=page)) re...
Traverse a paginated API response and extracts and concatenates "results" returned by API. Arguments: response (dict): API response object. endpoint (Slumber.Resource): API endpoint object. content_filter_query (dict): query parameters used to filter catalog results. query_params (dict): query parameters used to pagin...
juraj-google-style
def _create_non_scalar_select(main_expr: _evaluation.ExpressionNode, other_expr: _evaluation.ExpressionNode, main_result: _sql_data_types.StandardSqlExpression, other_result: _sql_data_types.StandardSqlExpression, collection_check_func_name: str, sql_data_type: _sql_data_types.StandardSqlDataType, sql_alias: str): ...
Construct a Spark SQL select statement for non-scalar values. Args: main_expr: The primary (either left or right) expression being evaluated. other_expr: The secondary (opposite of main) expression. main_result: The result of evaluating the main expression. other_result: The result of evaluating the other expression. ...
github-repos
def install_bootstrapped_files(nb_path=None, server_config=True, DEBUG=False): install_path = None print('Starting hide_code.js install...') current_dir = path.abspath(path.dirname(__file__)) config_dirs = j_path.jupyter_config_path() notebook_module_path = Utils.get_notebook_module_dir() ...
Installs javascript and exporting server extensions in Jupyter notebook. Args: nb_path (string): Path to notebook module. server_config (boolean): Install exporting server extensions. DEBUG (boolean): Verbose mode.
juraj-google-style
def accept_prompt(self, text=None, response=None, wait=None): with self.driver.accept_modal("prompt", text=text, response=response, wait=wait): yield
Execute the wrapped code, accepting a prompt, optionally responding to the prompt. Args: text (str | RegexObject, optional): Text to match against the text in the modal. response (str, optional): Response to provide to the prompt. wait (int | float, optional): Maximum time to wait for the modal to appear after executi...
juraj-google-style
def format_underline(s, char="=", indents=0): n = len(s) ind = " " * indents return ["{}{}".format(ind, s), "{}{}".format(ind, char*n)]
Traces a dashed line below string Args: s: string char: indents: number of leading intenting spaces Returns: list >>> print("\\n".join(format_underline("Life of João da Silva", "^", 2))) Life of João da Silva ^^^^^^^^^^^^^^^^^^^^^
juraj-google-style
def valid_file(value): if not value: raise argparse.ArgumentTypeError("'' is not a valid file path") elif not os.path.exists(value): raise argparse.ArgumentTypeError( "%s is not a valid file path" % value) elif os.path.isdir(value): raise argparse.ArgumentTypeError( ...
Check if given file exists and is a regular file. Args: value (str): path to the file. Raises: argparse.ArgumentTypeError: if not valid. Returns: str: original value argument.
juraj-google-style
def validate_variable_name(self, name): if not name: raise SerializerError("Variable name is empty".format(name)) if name[0] not in PROPERTY_ALLOWED_START: msg = "Variable name '{}' must starts with a letter" raise SerializerError(msg.format(name)) ...
Validate variable name. Arguments: name (string): Property name. Returns: bool: ``True`` if variable name is valid.
juraj-google-style
def build_inputs_with_special_tokens(self, token_ids_0: List[int], token_ids_1: Optional[List[int]]=None) -> List[int]: if token_ids_1 is None: return self.prefix_tokens + token_ids_0 + self.suffix_tokens return self.prefix_tokens + token_ids_0 + token_ids_1 + self.suffix_tokens
Build model inputs from a sequence or a pair of sequence for sequence classification tasks by concatenating and adding special tokens. The special tokens depend on calling set_lang. An MBART-50 sequence has the following format, where `X` represents the sequence: - `input_ids` (for encoder) `[src_lang_code] X [eos]` ...
github-repos
def rapidfire(self, max_nlaunch=(- 1), max_loops=1, sleep_time=5): (num_launched, do_exit, launched) = (0, False, []) for count in range(max_loops): if do_exit: break if (count > 0): time.sleep(sleep_time) tasks = self.fetch_tasks_to_run() if any(((task in...
Keeps submitting `Tasks` until we are out of jobs or no job is ready to run. Args: max_nlaunch: Maximum number of launches. default: no limit. max_loops: Maximum number of loops sleep_time: seconds to sleep between rapidfire loop iterations Returns: The number of tasks launched.
codesearchnet
def positions(self, account: str = '') -> List[Position]: if account: return list(self.wrapper.positions[account].values()) else: return [v for d in self.wrapper.positions.values() for v in d.values()]
List of positions for the given account, or of all accounts if account is left blank. Args: account: If specified, filter for this account name.
juraj-google-style
def delete_template(self, template_id): url = self.TEMPLATE_DELETE_URL request = self._get_request() response = request.post((url + template_id), get_json=False) return response
Deletes the specified template Args: template_id (str): The id of the template to delete Returns: A status code
codesearchnet
def Update(self, other, callback): self.conditions.update(other.conditions) self._Register(other.conditions, callback)
Adds existing triggers to this set, optionally rebuilding the registry. Used to aggregate trigger methods from Probes to Methods to Checks. Args: other: Another Triggers object. callback: Registers all the updated triggers to the specified function.
juraj-google-style
def iplot_state_hinton(rho, figsize=None): html_template = Template() javascript_template = Template() rho = _validate_input_state(rho) if figsize is None: options = {} else: options = {'width': figsize[0], 'height': figsize[1]} div_number = str(time.time()...
Create a hinton representation. Graphical representation of the input array using a 2D city style graph (hinton). Args: rho (array): Density matrix figsize (tuple): Figure size in pixels.
juraj-google-style
def __init__(self, obj): if isinstance(obj, Stream) and obj.stream_dict.get("/Subtype") != "/Image": raise TypeError("can't construct PdfImage from non-image") self.obj = obj
Construct a PDF image from a Image XObject inside a PDF ``pim = PdfImage(page.Resources.XObject['/ImageNN'])`` Args: obj (pikepdf.Object): an Image XObject
juraj-google-style
def rotateInZMat(theta_deg): ct = np.cos(np.radians(theta_deg)) st = np.sin(np.radians(theta_deg)) rMat = np.array([[ct, (- st), 0], [st, ct, 0], [0, 0, 1]]) return rMat
Rotate a vector theta degrees around the z-axis Equivalent to yaw left Rotates the vector in the sense that the x-axis is rotated towards the y-axis. If looking along the z-axis (which is not the way you usually look at it), the vector rotates clockwise. If sitting on the vector [1,0,0], the rotation is towards the ...
codesearchnet
def format(self, data: Iterable[_FormatArg]) -> bytes: fix_arg = self._fix_format_arg return (self.how % tuple((fix_arg(item) for item in data)))
String interpolation into the format string. Args: data: The data interpolated into the format string. Examples: :: BytesFormat(b'Hello, %b!') % b'World' BytesFormat(b'%b, %b!') % (b'Hello', b'World')
codesearchnet
def resolve(self, context, provider): try: self._value.resolve(context, provider) except FailedLookup as e: raise FailedVariableLookup(self.name, e.lookup, e.error)
Recursively resolve any lookups with the Variable. Args: context (:class:`stacker.context.Context`): Current context for building the stack provider (:class:`stacker.provider.base.BaseProvider`): subclass of the base provider
codesearchnet
def __init__(self, request, file, *args, **kwargs): self.ranged_file = RangedFileReader(file) super(RangedFileResponse, self).__init__(self.ranged_file, *args, **kwargs) if 'HTTP_RANGE' in request.META: self.add_range_headers(request.META['HTTP_RANGE'])
RangedFileResponse constructor also requires a request, which checks whether range headers should be added to the response. Args: request(WGSIRequest): The Django request object. file (File): A file-like object.
juraj-google-style
def save_chkpt_vars(dic, path): logger.info("Variables to save to {}:".format(path)) keys = sorted(list(dic.keys())) logger.info(pprint.pformat(keys)) assert not path.endswith('.npy') if path.endswith('.npz'): np.savez_compressed(path, **dic) else: with tf.Graph().as_defaul...
Save variables in dic to path. Args: dic: {name: value} path: save as npz if the name ends with '.npz', otherwise save as a checkpoint.
juraj-google-style
def make_rsa_keypair(bits): private_key = rsa.generate_private_key( public_exponent=65537, key_size=bits, backend=default_backend(), ) private_pem = private_key.private_bytes( encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.TraditionalOpen...
Generate an RSA keypair. Args: bits (int): number of bits to use for the key. Returns: (private_key, public_key) - both as PEM encoded strings
juraj-google-style
def get_current_track_info(self): response = self.avTransport.GetPositionInfo([('InstanceID', 0), ('Channel', 'Master')]) track = {'title': '', 'artist': '', 'album': '', 'album_art': '', 'position': ''} track['playlist_position'] = response['Track'] track['duration'] = response['TrackDuration'] tra...
Get information about the currently playing track. Returns: dict: A dictionary containing information about the currently playing track: playlist_position, duration, title, artist, album, position and an album_art link. If we're unable to return data for a field, we'll return an empty string. This can happen for all ...
codesearchnet
def transform(self, df): for (name, function) in self.outputs: df[name] = function(df)
Transforms a DataFrame in place. Computes all outputs of the DataFrame. Args: df (pandas.DataFrame): DataFrame to transform.
codesearchnet
def preprocess_mel(self, audio: np.ndarray, beatstep: np.ndarray): if audio is not None and len(audio.shape) != 1: raise ValueError(f'Expected `audio` to be a single channel audio input of shape `(n, )` but found shape {audio.shape}.') if beatstep[0] > 0.0: beatstep = beatstep - beatstep[0] ...
Preprocessing for log-mel-spectrogram Args: audio (`numpy.ndarray` of shape `(audio_length, )` ): Raw audio waveform to be processed. beatstep (`numpy.ndarray`): Interpolated values of the raw audio. If beatstep[0] is greater than 0.0, then it will be shifted by the value at beatstep[0].
github-repos
def __init__(self, email, password): self.email = email self.password = password self.token = None self.refresh_token = None self.last_api_call = None self.state = [] self.authenticated = self._authenticate()
Create the EcoNet API interface object. Args: email (str): EcoNet account email address. password (str): EcoNet account password.
juraj-google-style
def determinize(self): epsilon_closure = {} for state in self.states: sid = state.stateid epsilon_closure[sid] = self._epsilon_closure(state) trans_table = {} for state in self.states: trans_table[state.stateid] = defaultdi...
Transforms a Non Deterministic DFA into a Deterministic Args: None Returns: DFA: The resulting DFA Creating an equivalent DFA is done using the standard algorithm. A nice description can be found in the book: Harry R. Lewis and Christos H. Papadimitriou. 1998. E print target_dfa_statelements of the Theory of Computati...
juraj-google-style
def _CheckByteStreamSize(self, byte_stream, byte_offset, data_type_size): try: byte_stream_size = len(byte_stream) except Exception as exception: raise errors.MappingError(exception) if byte_stream_size - byte_offset < data_type_size: raise errors.ByteStreamTooSmallError( ...
Checks if the byte stream is large enough for the data type. Args: byte_stream (bytes): byte stream. byte_offset (int): offset into the byte stream where to start. data_type_size (int): data type size. Raises: ByteStreamTooSmallError: if the byte stream is too small. MappingError: if the size of the byte stream canno...
juraj-google-style
def resolves_for(self, node): self.actual_title = normalize_text(node.title) return bool(self.search_regexp.search(self.actual_title))
Resolves this query relative to the given node. Args: node (node.Document): The node to be evaluated. Returns: bool: Whether the given node matches this query.
juraj-google-style
def update_data(func): default = dict([ (param.name, param.default) for param in inspect.signature(func).parameters.values() if param.default != getattr(inspect, '_empty') ]) @wraps(func) def wrapper(*args, **kwargs): default.update(kwargs) kwargs.update(de...
Decorator to save data more easily. Use parquet as data format Args: func: function to load data from data source Returns: wrapped function
juraj-google-style
def add(self, spec): for limit in spec.limit_to: if limit not in self.limit_to: self.limit_to.append(limit)
Add limitations of given spec to self's. Args: spec (PackageSpec): another spec.
juraj-google-style
def deep_variable_product(variables, limit: int=DEEP_VARIABLE_LIMIT): return _deep_values_list_product([v.bindings for v in variables], set(), ComplexityLimit(limit))
Take the deep Cartesian product of a list of Variables. For example: x1.children = {v2, v3} v1 = {x1, x2} v2 = {x3} v3 = {x4, x5} v4 = {x6} then deep_variable_product([v1, v4]) will return: [[x1, x3, x4, x6], [x1, x3, x5, x6], [x2, x6]] . Args: variables: A sequence of Variables. limit: How many results we allow befor...
github-repos
def build_vep_string(vep_info, vep_columns): logger = getLogger(__name__) logger.debug("Building vep string from {0}".format(vep_info)) logger.debug("Found vep headers {0}".format(vep_columns)) vep_strings = [] for vep_annotation in vep_info: try: vep_info_list = [ ...
Build a vep string formatted string. Take a list with vep annotations and build a new vep string Args: vep_info (list): A list with vep annotation dictionaries vep_columns (list): A list with the vep column names found in the header of the vcf Returns: string: A string with the proper vep annotations
juraj-google-style
def delete_file_v2(path): _pywrap_file_io.DeleteFile(compat.path_to_bytes(path))
Deletes the path located at 'path'. Args: path: string, a path Raises: errors.OpError: Propagates any errors reported by the FileSystem API. E.g., `NotFoundError` if the path does not exist.
github-repos
def random_string(length=8, charset=None): if length < 1: raise ValueError('Length must be > 0') if not charset: charset = string.letters + string.digits return ''.join(random.choice(charset) for unused in xrange(length))
Generates a string with random characters. If no charset is specified, only letters and digits are used. Args: length (int) length of the returned string charset (string) list of characters to choose from Returns: (str) with random characters from charset Raises: -
juraj-google-style
def requirements(requirements_file): return [str(pkg.req) for pkg in parse_requirements(requirements_file, session=pip_download.PipSession()) if (pkg.req is not None)]
Return packages mentioned in the given file. Args: requirements_file (str): path to the requirements file to be parsed. Returns: (list): 3rd-party package dependencies contained in the file.
codesearchnet
def update_conversation(self, conversation): new_state = conversation.self_conversation_state old_state = self._conversation.self_conversation_state self._conversation = conversation if (not new_state.delivery_medium_option): new_state.delivery_medium_option.extend(old_state.delivery_medium_opti...
Update the internal state of the conversation. This method is used by :class:`.ConversationList` to maintain this instance. Args: conversation: ``Conversation`` message.
codesearchnet
def get_metadata(self, key) -> str: return self.metadata[key] if key in self.metadata else None
Get the value of a metadata. Returns None if metadata does not exist. Args: key (str): name of the metadata Returns: str: the value of the metadata (or None)
juraj-google-style
def _wait_for_any_event(events, timeout_s): def any_event_set(): return any(event.is_set() for event in events) result = timeouts.loop_until_timeout_or_true( timeout_s, any_event_set, sleep_s=_WAIT_FOR_ANY_EVENT_POLL_S) return result or any_event_set()
Wait for any in a list of threading.Event's to be set. Args: events: List of threading.Event's. timeout_s: Max duration in seconds to wait before returning. Returns: True if at least one event was set before the timeout expired, else False.
juraj-google-style
def points_are_in_a_straight_line(points, tolerance=1e-07): a = points[0] b = points[1] for c in points[2:]: if (area_of_a_triangle_in_cartesian_space(a, b, c) > tolerance): return False return True
Check whether a set of points fall on a straight line. Calculates the areas of triangles formed by triplets of the points. Returns False is any of these areas are larger than the tolerance. Args: points (list(np.array)): list of Cartesian coordinates for each point. tolerance (optional:float): the maximum triangle siz...
codesearchnet
def _parse_order_by(model, order_by): out = [] for key in order_by: key = key.strip() if key.startswith('+'): out.append(getattr(model, key[1:])) elif key.startswith('-'): out.append(getattr(model, key[1:]).desc()) else: out.append(getattr(mode...
This function figures out the list of orderings for the given model and argument. Args: model (nautilus.BaseModel): The model to compute ordering against order_by (list of str): the list of fields to order_by. If the field starts with a `+` then the order is acending, if `-` descending, if no character proceeds the fi...
codesearchnet
def iterable_source(iterable, target): it = iter(iterable) for item in it: try: target.send(item) except StopIteration: return prepend(item, it) return empty_iter()
Convert an iterable into a stream of events. Args: iterable: A series of items which will be sent to the target one by one. target: The target coroutine or sink. Returns: An iterator over any remaining items.
juraj-google-style
def onCall(self, n): cond_oncall = (n + 1) return _SinonStubCondition(copy=self._copy, oncall=cond_oncall, cond_args=self._cond_args, cond_kwargs=self._cond_kwargs)
Adds a condition for when the stub is called. When the condition is met, a special return value can be returned. Adds the specified call number into the condition list. For example, when the stub function is called the second time, it will return "#": stub.onCall(1).returns("#") Without returns/throws at the end of t...
codesearchnet
def track_event(self, name, properties=None, measurements=None): data = channel.contracts.EventData() data.name = name or NULL_CONSTANT_STRING if properties: data.properties = properties if measurements: data.measurements = measurements self.trac...
Send information about a single event that has occurred in the context of the application. Args: name (str). the data to associate to this event.\n properties (dict). the set of custom properties the client wants attached to this data item. (defaults to: None)\n measurements (dict). the set of custom measurements the ...
juraj-google-style
def load_attributes_from_hdf5_group(group, name): if name in group.attrs: data = [n.decode('utf8') if hasattr(n, 'decode') else n for n in group.attrs[name]] else: data = [] chunk_id = 0 while f'{name}{chunk_id}' in group.attrs: data.extend([n.decode('utf8') if hasatt...
Loads attributes of the specified name from the HDF5 group. This method deals with an inherent problem of HDF5 file which is not able to store data larger than HDF5_OBJECT_HEADER_LIMIT bytes. Args: group: A pointer to a HDF5 group. name: A name of the attributes to load. Returns: data: Attributes data.
github-repos
def fully_qualify_alias_labels(label, aliases): for (alias, full_name) in aliases.items(): if (label == alias): return full_name elif label.startswith((alias + '.')): return (full_name + label[len(alias):]) return label
Replace any aliases in label with the fully qualified name. Args: label -- A label : str representing a name (e.g. myos.system) aliases -- A dict of {alias: real_name} (e.g. {'myos': 'os'}) >>> fully_qualify_alias_labels('myos.mycall', {'myos':'os'}) 'os.mycall'
codesearchnet
def _ParseRecord(self, parser_mediator, file_object, record_offset): record_strings_data_offset = file_object.tell() record_strings_data_size = record_offset - record_strings_data_offset record_strings_data = self._ReadData( file_object, record_strings_data_offset, record_strings_data_size) ...
Parses a record and produces events. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. file_object (file): file-like object. record_offset (int): offset of the record relative to the start of the file. Returns: int: next record offset. Rais...
juraj-google-style
def get_location_from_HDX_code(code, locations=None, configuration=None): if (locations is None): locations = Locations.validlocations(configuration) for locdict in locations: if (code.upper() == locdict['name'].upper()): return locdict['title'] return None
Get location from HDX location code Args: code (str): code for which to get location name locations (Optional[List[Dict]]): Valid locations list. Defaults to list downloaded from HDX. configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. Returns: Optional[str]: location name
codesearchnet
def assert_same_float_dtype(tensors=None, dtype=None): if tensors: dtype = _assert_same_base_type(tensors, dtype) if not dtype: dtype = dtypes.float32 elif not dtype.is_floating: raise ValueError('Expected floating point type, got %s.' % dtype) return dtype
Validate and return float type based on `tensors` and `dtype`. For ops such as matrix multiplication, inputs and weights must be of the same float type. This function validates that all `tensors` are the same type, validates that type is `dtype` (if supplied), and returns the type. Type must be a floating point type. ...
github-repos
def parse_datetime(__string: str) -> datetime.datetime: if (not __string): datetime_ = datetime.datetime.now(datetime.timezone.utc) else: datetime_ = ciso8601.parse_datetime(__string) if (datetime_.tzinfo is None): datetime_ = datetime_.replace(tzinfo=datetime.timezone.utc) retur...
Parse ISO-8601 datetime string. Args: __string: Datetime string to parse Returns: Parsed datetime object
codesearchnet
def open_usb_handle(self, port_num): serial = self.get_usb_serial(port_num) return local_usb.LibUsbHandle.open(serial_number=serial)
open usb port Args: port_num: port number on the Cambrionix unit Return: usb handle
juraj-google-style
def decode(self, fp: TextIO) -> BioCCollection: tree = etree.parse(fp) collection = self.__parse_collection(tree.getroot()) collection.encoding = tree.docinfo.encoding collection.standalone = tree.docinfo.standalone collection.version = tree.docinfo.xml_version return collection
Deserialize ``fp`` to a BioC collection object. Args: fp: a ``.read()``-supporting file-like object containing a BioC collection Returns: an object of BioCollection
codesearchnet
def as_int(self) -> int: if len(self._messages) != 1: raise ValueError('FHIRPath did not evaluate to a single integer.') return proto_utils.get_value_at_field(self._messages[0], 'value')
Returns the result as an integer. Raises: ValueError if the `EvaluationResult` is not a single integer.
github-repos
def kick_user(self, user_id, reason=""): try: self.client.api.kick_user(self.room_id, user_id) return True except MatrixRequestError: return False
Kick a user from this room. Args: user_id (str): The matrix user id of a user. reason (str): A reason for kicking the user. Returns: boolean: Whether user was kicked.
juraj-google-style
def setPulseInputRatio(self, line_in, new_cnst, password='00000000'): result = False self.setContext('setPulseInputRatio') try: if (not self.requestA()): self.writeCmdMsg('Bad read CRC on setting') elif (not self.serialCmdPwdAuth(password)): self.writeCmdMsg('Password...
Serial call to set pulse input ratio on a line. Args: line_in (int): Member of :class:`~ekmmeters.Pulse` new_cnst (int): New pulse input ratio password (str): Optional password Returns:
codesearchnet
def message_factory(msg_type, msg_types=MESSAGE_TYPES, *args, **kwargs): try: return msg_types[msg_type.lower()](*args, **kwargs) except (UnknownProfileError, InvalidMessageInputError) as e: err_exit('Unable to send message: ', e) except KeyError: raise UnsupportedMessageTypeError(ms...
Factory function to return the specified message instance. Args: :msg_type: (str) the type of message to send, i.e. 'Email' :msg_types: (str, list, or set) the supported message types :kwargs: (dict) keywords arguments that are required for the various message types. See docstrings for each type. i.e. help(messages.E...
codesearchnet
def get_file_handle(file_path): LOG.debug("Check if file end is correct") if not os.path.exists(file_path): raise IOError("No such file:{0}".format(file_path)) if not os.path.splitext(file_path)[-1] in VALID_ENDINGS: raise IOError("Not a valid vcf file name: {}".format(file_path)) ...
Return cyvcf2 VCF object Args: file_path(str) Returns: vcf_obj(cyvcf2.VCF)
juraj-google-style
def readinto(self, b): self._checkClosed() if self._position >= self._downloader.size: return 0 start = self._position end = min(self._position + len(b), self._downloader.size) data = self._downloader.get_range(start, end) self._position += len(data) b[:len(data)] = data return l...
Read up to len(b) bytes into b. Returns number of bytes read (0 for EOF). Args: b: (bytearray/memoryview) Buffer to read into.
github-repos
def lift_to_graph(tensors, graph, sources=None, disallowed_placeholders=None, add_sources=False, handle_captures=False, base_graph=None, op_map=None): variable_init_tensors = [] init_tensors = [] for tensor in tensors: if isinstance(tensor, resource_variable_ops.ResourceVariable): variab...
Copies the tensor and all its inputs recursively to the outer graph. Args: tensors: The Tensors to lift. graph: The graph to lift to. sources: Optional sequence of nodes to start from. If omitted the whole subgraph which feeds into `init_tensor` is lifted. disallowed_placeholders: An optional set of ops which may not ...
github-repos
def run_inference(self, batch: Sequence[dict[str, torch.Tensor]], model: torch.nn.Module, inference_args: Optional[dict[str, Any]]=None) -> Iterable[PredictionResult]: inference_args = {} if not inference_args else inference_args model_id = self._state_dict_path if not self._torch_script_model_path else self._t...
Runs inferences on a batch of Keyed Tensors and returns an Iterable of Tensor Predictions. For the same key across all examples, this will stack all Tensors values in a vectorized format to optimize the inference call. Args: batch: A sequence of keyed Tensors. These Tensors should be batchable, as this method will ca...
github-repos
def error_handler(self, handler):
    """Set the function the DLL invokes on error messages.

    This is a no-op while the DLL is already open, so it should be
    called prior to calling ``open()``.

    Args:
        self (JLink): the ``JLink`` instance
        handler (function): callback for error messages; ``None`` falls
            back to ``util.noop``

    Returns:
        ``None``
    """
    if self.opened():
        return
    callback = handler or util.noop
    # Keep a reference on self so the ctypes callback is not collected.
    self._error_handler = enums.JLinkFunctions.LOG_PROTOTYPE(callback)
    self._dll.JLINKARM_SetErrorOutHandler(self._error_handler)
Setter for the error handler function. If the DLL is open, this function is a no-op, so it should be called prior to calling ``open()``. Args: self (JLink): the ``JLink`` instance handler (function): function to call on error messages Returns: ``None``
codesearchnet
def _get_inputs_tensor_info_from_meta_graph_def(meta_graph_def, signature_def_key): if signature_def_key not in meta_graph_def.signature_def: raise ValueError(f'Could not find signature "{signature_def_key}". Please choose from: {', '.join(meta_graph_def.signature_def.keys())}') return meta_graph_def.si...
Gets TensorInfo for all inputs of the SignatureDef. Returns a dictionary that maps each input key to its TensorInfo for the given signature_def_key in the meta_graph_def Args: meta_graph_def: MetaGraphDef protocol buffer with the SignatureDef map to look up SignatureDef key. signature_def_key: A SignatureDef key stri...
github-repos
def _bulk_cache_lookup(self, api_name, keys): if self._cache: responses = self._cache.bulk_lookup(api_name, keys) missing_keys = [key for key in keys if (key not in responses.keys())] return (responses, missing_keys) return ({}, keys)
Performs a bulk cache lookup and returns a tuple with the results found and the keys missing in the cache. If the cache is not configured it will return an empty dictionary of found results and the initial list of keys. Args: api_name: a string name of the API. keys: an enumerable of string keys. Returns: A tuple: (resp...
codesearchnet