code
stringlengths
20
4.93k
docstring
stringlengths
33
1.27k
source
stringclasses
3 values
def from_db(cls, bigchain, tx_dict_list): return_list = True if isinstance(tx_dict_list, dict): tx_dict_list = [tx_dict_list] return_list = False tx_map = {} tx_ids = [] for tx in tx_dict_list: tx.update({'metadata': None}) tx_map[tx['id']] = tx tx_ids.append(...
Helper method that reconstructs a transaction dict that was returned from the database. It checks what asset_id to retrieve, retrieves the asset from the asset table and reconstructs the transaction. Args: bigchain (:class:`~bigchaindb.tendermint.BigchainDB`): An instance of BigchainDB used to perform database queries...
codesearchnet
def normalize_date(tmy_date, year):
    """Map a TMY3 timestamp onto an arbitrary year.

    Args:
        tmy_date (datetime.datetime): the TMY3 date to remap.
        year (int): the desired target year.

    Returns:
        datetime.datetime: ``tmy_date`` re-anchored to ``year``; the
        Jan 1 00:00 rollover point is pushed into ``year + 1``.
    """
    month = tmy_date.month
    day = tmy_date.day - 1
    hour = tmy_date.hour
    # Bug fix: use == (value equality) instead of `is` (identity).
    # Identity comparison on ints only works by accident for small
    # interned values and is not a correctness guarantee.
    if month == 1 and day == 0 and hour == 0:
        year = year + 1
    return (datetime.datetime(year, month, 1)
            + datetime.timedelta(days=day, hours=hour, minutes=0))
change TMY3 date to an arbitrary year. Args: tmy_date (datetime): date to mangle. year (int): desired year. Returns: (datetime): the TMY3 timestamp re-anchored to the desired year.
codesearchnet
async def send_message(self, name, level, message):
    """Post a message for a service and broadcast the update.

    Args:
        name (str): The short name of the service to post to.
        level (int): The message level (info, warning, error).
        message (str): The message contents.

    Raises:
        ArgumentError: If ``name`` is not a known service.
    """
    if name not in self.services:
        raise ArgumentError('Unknown service name', short_name=name)

    service_state = self.services[name]['state']
    msg = service_state.post_message(level, message)
    await self._notify_update(name, 'new_message', msg.to_dict())
Post a message for a service. Args: name (string): The short name of the service to query level (int): The level of the message (info, warning, error) message (string): The message contents
codesearchnet
def collect_publications(self):
    """Recursively gather every publication referenced by this tree.

    Returns:
        list: UUID strings from this tree and all of its sub-trees.
    """
    publications = list(self.sub_publications)
    publications.extend(
        pub
        for subtree in self.sub_trees
        for pub in subtree.collect_publications()
    )
    return publications
Recursively collect list of all publications referenced in this tree and all sub-trees. Returns: list: List of UUID strings.
codesearchnet
def __init__(self, dtype, shape, accumulator_ref): self._dtype = dtype if shape is not None: self._shape = tensor_shape.TensorShape(shape) else: self._shape = tensor_shape.unknown_shape() self._accumulator_ref = accumulator_ref if context.executing_eagerly(): self._name = con...
Creates a new ConditionalAccumulator. Args: dtype: Datatype of the accumulated gradients. shape: Shape of the accumulated gradients. accumulator_ref: A handle to the conditional accumulator, created by sub- classes
github-repos
def DefaultParseValue(value):
    """Parse a CLI argument string into its Python value when possible.

    If ``value`` consists only of Python literals and containers it is
    evaluated to the corresponding Python object; otherwise the original
    string is returned unchanged.

    Args:
        value: A string taken from the command line.

    Returns:
        The literal-evaluated value, or ``value`` itself when it cannot
        be safely interpreted.
    """
    try:
        parsed = _LiteralEval(value)
    except (SyntaxError, ValueError):
        return value
    return parsed
The default argument parsing function used by Fire CLIs. If the value is made of only Python literals and containers, then the value is parsed as it's Python value. Otherwise, provided the value contains no quote, escape, or parenthetical characters, the value is treated as a string. Args: value: A string from the co...
github-repos
def get(self):
    """Return the quantiles computed from the master tracker's buffer.

    Returns:
        list: The calculated quantiles.
    """
    master = self._master
    return master._get_helper(master._sorted_items, self._q)
Returns the calculated quantiles based on the master tracker's buffer. Returns: A list of calculated quantiles.
github-repos
def easeInOutCirc(n):
    """Circular easing: accelerate to the midpoint, then decelerate.

    Args:
        n (float): Time progress, from 0.0 to 1.0.

    Returns:
        float: Eased progress, from 0.0 to 1.0. Suitable for passing to
        getPointOnLine().
    """
    _checkRange(n)
    scaled = 2 * n
    if scaled < 1:
        return -0.5 * (math.sqrt(1 - scaled * scaled) - 1)
    shifted = scaled - 2
    return 0.5 * (math.sqrt(1 - shifted * shifted) + 1)
A circular tween function that accelerates, reaches the midpoint, and then decelerates. Args: n (float): The time progress, starting at 0.0 and ending at 1.0. Returns: (float) The line progress, starting at 0.0 and ending at 1.0. Suitable for passing to getPointOnLine().
juraj-google-style
def copy_to_file(self, name, fp_dest, callback=None): assert compat.is_native(name) def _write_to_file(data): fp_dest.write(data) if callback: callback(data) self.ftp.retrbinary( "RETR {}".format(name), _write_to_file, F...
Write cur_dir/name to file-like `fp_dest`. Args: name (str): file name, located in self.curdir fp_dest (file-like): must support write() method callback (function, optional): Called like `func(buf)` for every written chunk
juraj-google-style
def _tokens_to_subtoken(self, tokens):
    """Convert a list of tokens into a flat list of subtoken strings.

    Args:
        tokens: A list of token strings.

    Returns:
        A list of subtoken strings covering every input token, in order.
    """
    subtokens = []
    for token in tokens:
        escaped = _escape_token(token, self._alphabet)
        subtokens.extend(self._escaped_token_to_subtoken_strings(escaped))
    return subtokens
Converts a list of tokens to a list of subtoken. Args: tokens: a list of strings. Returns: a list of integers in the range [0, vocab_size)
juraj-google-style
def select_by_value(self, value): self._selected_key = None self._selected_item = None for k in self.children: item = self.children[k] if (item.get_text() == value): item.attributes['selected'] = 'selected' self._selected_key = k self._selected_item = item...
Selects a DropDownItem by means of the contained text. Args: value (str): Textual content of the DropDownItem that has to be selected.
codesearchnet
def _unwrap_el(self, value): if isinstance(value, dict) and 'ELEMENT' in value: element_id = value.get('ELEMENT') return WebElement(element_id, self) elif isinstance(value, list) and not isinstance(value, str): return [self._unwrap_el(item) for item in value]...
Convert {'Element': 1234} to WebElement Object Args: value(str|list|dict): The value field in the json response. Returns: The unwrapped value.
juraj-google-style
def extend(*args):
    """Shallow-merge any number of dictionaries.

    Args:
        *args: Zero or more mappings. The first one determines the type
            of the result; later ones override earlier keys.

    Returns:
        A new instance of the same type as the first argument, holding
        the merged key/value pairs. An empty dict when called with no
        arguments.
    """
    if not args:
        return {}
    merged = type(args[0])(args[0])
    for mapping in args[1:]:
        merged.update(mapping)
    return merged
shallow dictionary merge Args: *args: zero or more dicts; the first is copied and the rest are applied to it in order. Returns: new instance of the same type as the first dict, with all later dicts merged in (later keys win).
juraj-google-style
def save_to_file(json_data, filename): if filename[-5:] != '.json': print('filename: %s' % filename) filename += '.json' with open(PATH_TO_DIR + '/' + filename, 'w') as f: json.dump(json_data, f, sort_keys=True, indent=4) print(' Successfully wrote configs to file `%s`.\n' % filename...
Saves all detected configuration(s) into a JSON file. Args: json_data: Dict of all configurations found. filename: String that is the name of the output JSON file.
github-repos
def weights_concatenated(labels): eos_mask = tf.to_int32(tf.equal(labels, 1)) sentence_num = tf.cumsum(eos_mask, axis=1, exclusive=True) in_target = tf.equal(tf.mod(sentence_num, 2), 1) sentence_num_plus_one = (sentence_num + 1) shifted = tf.pad(sentence_num_plus_one, [[0, 0], [2, 0], [0, 0], [0, 0]...
Assign weight 1.0 to the "target" part of the concatenated labels. The labels look like: source English I love you . ID1 target French Je t'aime . ID1 source English the cat ID1 target French le chat ID1 source English ... We want to assign weight 1.0 to all words in the target text (including the ID1 end symbol), bu...
codesearchnet
def get_dict(self):
    """Validate this item and return its internal-API dictionary.

    Returns:
        Dict[str, Any]: The internal-API dictionary representing the
        :class:`DisplayDataItem`.

    Raises:
        ValueError: If the item is not valid (raised by ``is_valid``).
    """
    # Validation runs for its side effect of raising on invalid items.
    self.is_valid()
    return self._get_dict()
Returns the internal-API dictionary representing the :class:`DisplayDataItem`. Returns: Dict[str, Any]: A dictionary. The internal-API dictionary representing the :class:`DisplayDataItem`. Raises: ValueError: if the item is not valid.
github-repos
def insert_chain(cur, chain, encoded_data=None): if (encoded_data is None): encoded_data = {} if ('nodes' not in encoded_data): encoded_data['nodes'] = json.dumps(sorted(chain), separators=(',', ':')) if ('chain_length' not in encoded_data): encoded_data['chain_length'] = len(chain) ...
Insert a chain into the cache. Args: cur (:class:`sqlite3.Cursor`): An sqlite3 cursor. This function is meant to be run within a :obj:`with` statement. chain (iterable): A collection of nodes. Chains in embedding act as one node. encoded_data (dict, optional): If a dictionary is provided, it will be populated with t...
codesearchnet
def CopyMicrosecondsToFractionOfSecond(cls, microseconds): if microseconds < 0 or microseconds >= definitions.MICROSECONDS_PER_SECOND: raise ValueError( 'Number of microseconds value: {0:d} out of bounds.'.format( microseconds)) return decimal.Decimal(microseconds) / definiti...
Copies the number of microseconds to a fraction of second value. Args: microseconds (int): number of microseconds. Returns: decimal.Decimal: fraction of second, which must be a value between 0.0 and 1.0. Raises: ValueError: if the number of microseconds is out of bounds.
juraj-google-style
def fit(self, X, y):
    """Learn the word, character and label vocabularies from data.

    Args:
        X: Iterable of documents yielding str, unicode or file objects.
        y: Iterable of label sequences aligned with ``X``.

    Returns:
        IndexTransformer: This instance, to allow call chaining.
    """
    self._word_vocab.add_documents(X)
    self._label_vocab.add_documents(y)
    if self._use_char:
        for document in X:
            self._char_vocab.add_documents(document)
    # Build order matches the declaration order: word, char, label.
    for vocab in (self._word_vocab, self._char_vocab, self._label_vocab):
        vocab.build()
    return self
Learn vocabulary from training set. Args: X : iterable. An iterable which yields either str, unicode or file objects. y : iterable. An iterable of label sequences aligned with X. Returns: self : IndexTransformer.
codesearchnet
def setRelay(self, seconds, relay, status, password="00000000"): result = False self.setContext("setRelay") try: self.clearCmdMsg() if len(password) != 8: self.writeCmdMsg("Invalid password length.") self.setContext("") ...
Serial call to set relay. Args: seconds (int): Seconds to hold, zero is hold forever. See :class:`~ekmmeters.RelayInterval`. relay (int): Selected relay, see :class:`~ekmmeters.Relay`. status (int): Status to set, see :class:`~ekmmeters.RelayState` password (str): Optional password Returns: bool: True on completion an...
juraj-google-style
def typing(self, room: Room, timeout: int=5000):
    """Send a typing event for this user directly to the api.

    Args:
        room: Room to send the typing event to.
        timeout: Timeout for the event, in ms.
    """
    path = f'/rooms/{quote(room.room_id)}/typing/{quote(self.user_id)}'
    payload = {'typing': True, 'timeout': timeout}
    return self.api._send('PUT', path, payload)
Send typing event directly to api Args: room: room to send typing event to timeout: timeout for the event, in ms
codesearchnet
def list_workers(config, *, filter_by_queues=None): celery_app = create_app(config) worker_stats = celery_app.control.inspect().stats() queue_stats = celery_app.control.inspect().active_queues() if worker_stats is None: return [] workers = [] for name, w_stat in worker_stats.items...
Return a list of all available workers. Args: config (Config): Reference to the configuration object from which the settings are retrieved. filter_by_queues (list): Restrict the returned workers to workers that listen to at least one of the queue names in this list. Returns: list: A list of WorkerStats objects.
juraj-google-style
def _RemoveForwardedIps(self, forwarded_ips, interface): for address in forwarded_ips: self.ip_forwarding_utils.RemoveForwardedIp(address, interface)
Remove the forwarded IP addresses from the network interface. Args: forwarded_ips: list, the forwarded IP address strings to delete. interface: string, the output device to use.
codesearchnet
def add_to_screen(self, screen_width, screen): for (lineno, fields) in enumerate(self.line_fields): for (left, field) in self.compute_positions(screen_width, fields): logger.debug('Adding field %s to screen %s at x=%d->%d, y=%d', field, screen.ref, left, ((left + field.width) - 1), (1 + lineno))...
Add the pattern to a screen. Also fills self.widgets. Args: screen_width (int): the width of the screen screen (lcdprod.Screen): the screen to fill.
codesearchnet
def create(cls, **kwargs):
    """Initialize a new instance, add it to the db and commit.

    Args:
        **kwargs: Keyword arguments for the init constructor.

    Returns:
        The newly added (and committed) instance.

    Examples:
        >>> user = User.create(name="Vicky", email="vicky@h.com")
        >>> user.id
        35
    """
    try:
        return cls.add(cls.new(**kwargs))
    except BaseException:
        # Roll back the failed transaction before propagating, leaving
        # the session usable. BaseException (not a bare `except:`)
        # keeps the original behavior of also rolling back on
        # KeyboardInterrupt/SystemExit while satisfying lint rules.
        cls.session.rollback()
        raise
Initializes a new instance, adds it to the db and commits the transaction. Args: **kwargs: The keyword arguments for the init constructor. Examples: >>> user = User.create(name="Vicky", email="vicky@h.com") >>> user.id 35
codesearchnet
def __add__(self, other): assert isinstance(other, LocationDescriptor), "You can only add LocationDescriptor together." assert self._separation_char == other._separation_char, \ "You can only add LocationDescriptor together if they share the same separator character." ...
Create a **new** :class:`LocationDescriptor` object that is the sum of this one and another. Args: self: This :class:`LocationDescriptor` object. other: Another :class:`LocationDescriptor` object. Returns: Sum of both :class:`LocationDescriptor` objects.
juraj-google-style
def _GetFirefoxConfig(self, file_object, display_name): to_read = min(file_object.get_size(), self._INITIAL_CACHE_FILE_SIZE) while (file_object.get_offset() < to_read): offset = file_object.get_offset() try: (cache_entry, _) = self._ReadCacheEntry(file_object, display_name, self._MIN...
Determine cache file block size. Args: file_object (dfvfs.FileIO): a file-like object. display_name (str): display name. Returns: firefox_cache_config: namedtuple containing the block size and first record offset. Raises: UnableToParseFile: if no valid cache record could be found.
codesearchnet
def cooccurrences(self, domains):
    """Get the domains that co-occur with the input domains.

    Args:
        domains: an enumerable of domain name strings.

    Returns:
        An enumerable of domain name strings.
    """
    return self._multi_get(
        'opendns-cooccurrences',
        u'recommendations/name/{0}.json',
        domains,
    )
Get the domains related to input domains. Args: domains: an enumerable of strings domain names Returns: An enumerable of string domain names
juraj-google-style
def circuit_to_quirk_url(circuit: circuits.Circuit, prefer_unknown_gate_to_failure: bool=False, escape_url=True) -> str: circuit = circuit.copy() linearize_circuit_qubits(circuit) cols = [] for moment in circuit: can_merges = [] for op in moment.operations: for (col, can_merg...
Returns a Quirk URL for the given circuit. Args: circuit: The circuit to open in Quirk. prefer_unknown_gate_to_failure: If not set, gates that fail to convert will cause this function to raise an error. If set, a URL containing bad gates will be generated. (Quirk will open the URL, and replace the bad gates with parse...
codesearchnet
def _prepare_tables(self): values = torch.tensor([[[1.0, 2.0, 3.0], [2.0, 0.0, 1.0], [1.0, 3.0, 4.0]], [[1.0, 2.0, 3.0], [2.0, 0.0, 1.0], [1.0, 3.0, 4.0]]]) row_index = IndexMap(indices=torch.tensor([[[0, 0, 0], [1, 1, 1], [2, 2, 2]], [[0, 0, 0], [1, 1, 1], [2, 2, 2]]]), num_segments=3, batch_dims=1) col_in...
Prepares two tables, both with three distinct rows. The first table has two columns: 1.0, 2.0 | 3.0 2.0, 0.0 | 1.0 1.0, 3.0 | 4.0 The second table has three columns: 1.0 | 2.0 | 3.0 2.0 | 0.0 | 1.0 1.0 | 3.0 | 4.0 Returns: SegmentedTensors with the tables.
github-repos
def str(name, default=None, allow_none=False, fallback=None):
    """Read an environment value and return it as a stripped string.

    Args:
        name: The environment variable name.
        default: The value to use if no environment variable is found.
        allow_none: Whether ``None`` is an acceptable return value.
        fallback: Passed through to the underlying ``read`` call.

    Returns:
        The stripped string value, or ``None`` when the resolved value
        is ``None`` and ``allow_none`` is set.
    """
    value = read(name, default, allow_none, fallback=fallback)
    if value is None and allow_none:
        return None
    return builtins.str(value).strip()
Get a string based environment value or the default. Args: name: The environment variable name default: The default value to use if no environment variable is found allow_none: If the return value can be `None` (i.e. optional)
codesearchnet
def is_deterministic(self):
    """Whether this coder is guaranteed to encode deterministically.

    A deterministic coder is required for key coders in GroupByKey
    operations to produce consistent results. This implementation
    conservatively reports ``False``.

    Returns:
        bool: Always ``False`` for this coder.
    """
    return False
Whether this coder is guaranteed to encode values deterministically. A deterministic coder is required for key coders in GroupByKey operations to produce consistent results. For example, note that the default coder, the PickleCoder, is not deterministic: the ordering of picked entries in maps may vary across executio...
github-repos
def _cauchy_equation(wavelength, coefficients): n = 0.0 for (i, c) in enumerate(coefficients): exponent = (2 * i) n += (c / (wavelength ** exponent)) return n
Helpful function to evaluate Cauchy equations. Args: wavelength (float, list, None): The wavelength(s) the Cauchy equation will be evaluated at. coefficients (list): A list of the coefficients of the Cauchy equation. Returns: float, list: The refractive index at the target wavelength(s).
codesearchnet
def _calculate_scores(self, query, key):
    """Calculate attention scores as a nonlinear sum of query and key.

    Args:
        query: Query tensor of shape `[batch_size, Tq, dim]`.
        key: Key tensor of shape `[batch_size, Tv, dim]`.

    Returns:
        Tensor of shape `[batch_size, Tq, Tv]`.
    """
    # Expand so query/key broadcast against each other pairwise.
    q_expanded = array_ops.expand_dims(query, axis=-2)
    k_expanded = array_ops.expand_dims(key, axis=-3)
    scale = self.scale if self.use_scale else 1.0
    return math_ops.reduce_sum(
        scale * math_ops.tanh(q_expanded + k_expanded), axis=-1)
Calculates attention scores as a nonlinear sum of query and key. Args: query: Query tensor of shape `[batch_size, Tq, dim]`. key: Key tensor of shape `[batch_size, Tv, dim]`. Returns: Tensor of shape `[batch_size, Tq, Tv]`.
github-repos
def request_via_socket(sock, search_target):
    """Send an SSDP M-SEARCH request via the provided socket.

    Args:
        sock: A socket suitable for sending a broadcast message —
            preferably one created by :py:func:`make_socket`.
        search_target (string): A :term:`resource type` target to
            search for.
    """
    msg = encode_request(
        'M-SEARCH * HTTP/1.1',
        HOST=MCAST_IP_PORT,
        MAN='"ssdp:discover"',
        MX='3',
        ST=search_target,
    )
    sock.sendto(msg, (MCAST_IP, MCAST_PORT))
Send an SSDP search request via the provided socket. Args: sock: A socket suitable for use to send a broadcast message - preferably one created by :py:func:`make_socket`. search_target (string): A :term:`resource type` target to search for.
codesearchnet
def add_gemini_query(self, name, query):
    """Add a user-defined gemini query to the database.

    Args:
        name (str): Display name for the query.
        query (str): The gemini query text.

    Returns:
        GeminiQuery: The newly persisted query object.
    """
    logger.info('Adding query {0} with text {1}'.format(name, query))
    gemini_query = GeminiQuery(name=name, query=query)
    self.session.add(gemini_query)
    self.save()
    return gemini_query
Add a user defined gemini query Args: name (str) query (str)
codesearchnet
def codeblocks(start=None, end=None, full=True): if full: for function in functions(start, end): fc = FlowChart(f=function.func_t) for block in fc: (yield block) else: (start, end) = fix_addresses(start, end) for code_block in FlowChart(bounds=(sta...
Get all `CodeBlock`s in a given range. Args: start - start address of the range. If `None` uses IDB start. end - end address of the range. If `None` uses IDB end. full - `True` is required to change node info (e.g. color). `False` causes faster iteration.
codesearchnet
def load(self, key: str) -> _ModelLoadStats: if key in self._tag_map: self._tag_map.move_to_end(key) return _ModelLoadStats(self._tag_map[key], None, None) else: self._tag_map[key] = uuid.uuid4().hex tag = self._tag_map[key] mh = self._mh_map[key] if self._max_models is not N...
Loads the appropriate model for the given key into memory. Args: key: the key associated with the model we'd like to load. Returns: _ModelLoadStats with tag, byte size, and latency to load the model. If the model was already loaded, byte size/latency will be None.
github-repos
def chain(processor_list: Sequence[Processor | PartProcessor]) -> Processor: if not processor_list: raise ValueError('processor_list is empty') chain_processor = processor_list[0] for p in processor_list[1:]: chain_processor = chain_processor + p if isinstance(chain_processor, PartProces...
Chain a sequence of processors. Args: processor_list: list of part processors or generic processors. Returns: A processor consisting of the chain of all the processors in the list. The execution is sequential from the first processor to the last but parts are processed concurrently overall.
github-repos
def _int_to_pos(self, flat_position): return ((flat_position % self.env.action_space.screen_shape[0]), (flat_position % self.env.action_space.screen_shape[1]))
Returns x, y from flat_position integer. Args: flat_position: flattened position integer Returns: x, y
codesearchnet
def search_rule_by_id(self, ruleID) -> Rule:
    """Search this object's rules for one with the given id.

    Args:
        ruleID (str): the rule id to search for.

    Returns:
        The matching rule object, or None when no rule has that id.
    """
    return next((rule for rule in self.rules if rule.id == ruleID), None)
searches a rule by given id Args: ruleID(str): the rule to search for Returns the rule object or None if it couldn't find a rule
juraj-google-style
def __init__(self, text_encoder_config=None, data="clean100", **kwargs): if data not in _DATA_OPTIONS: raise ValueError("data must be one of %s" % _DATA_OPTIONS) name = kwargs.get("name") if name is None: encoder_name = ( text_encoder_config.name if text_encoder_config else "plain...
Constructs a LibrispeechConfig. Args: text_encoder_config: `tfds.features.text.TextEncoderConfig`, configuration for the `tfds.features.text.TextEncoder` used for the text feature. data: `str`, one of `(clean100, clean360, all)`. `clean100` uses only the clean data without `train-clean-360`. `clean360` uses clean data...
juraj-google-style
def check_for_missing_options(config): for (section_name, section) in config: for (option_name, option) in section: if (option.required and (option.value is None)): raise exc.MissingRequiredOption('Option {0} in namespace {1} is required.'.format(option_name, section_name)) r...
Iter over a config and raise if a required option is still not set. Args: config (confpy.core.config.Configuration): The configuration object to validate. Raises: MissingRequiredOption: If any required options are not set in the configuration object. Required options with default values are considered set and will n...
codesearchnet
def _alephResultToDict(dom): result = {} for i in dom.childs: if not i.isOpeningTag(): continue keyword = i.getTagName().strip() value = _tryConvertToInt(i.getContent().strip()) if keyword in result: if isinstance...
Convert part of non-nested XML to :py:class:`dict`. Args: dom (HTMLElement tree): pre-parsed XML (see dhtmlparser). Returns: dict: with python data
juraj-google-style
def lookup_instances(fragment, verbose=True, filter_by_key=True): def vprint(*args): if verbose: print(*args) region = get_region() client = get_ec2_client() ec2 = get_ec2_resource() response = client.describe_instances() assert is_good_response(response) instance_list = [] for instance ...
Returns ec2.Instance object whose name contains fragment, in reverse order of launching (ie, most recent intance first). Optionally filters by key, only including instances launched with key_name matching current username. args: verbose: print information about all matching instances found filter_by_key if True, ign...
juraj-google-style
def __setattr__(self, name, value): if name in self.__by_name or name.startswith('_Message__'): object.__setattr__(self, name, value) else: raise AttributeError("May not assign arbitrary value %s " "to message %s" % (name, type(self).__na...
Change set behavior for messages. Messages may only be assigned values that are fields. Does not try to validate field when set. Args: name: Name of field to assign to. value: Value to assign to field. Raises: AttributeError when trying to assign value that is not a field.
juraj-google-style
def __init__(self, port=CONTROLLER_PORT, easgd_alpha=0.5, start_halving_at=6, end_at=10, sync_freq=10, halving_freq=1, valid_freq=1500, learning_rate=0.1, log_path=None): Controller.__init__(self, port) self.epoch_start_halving = start_halvin...
Initialize the controller. Args: port (int): batches in one training step easgd_alpha (float)
juraj-google-style
def get_metadata(self, key: str, per_trial: bool=True) -> Optional[Any]:
Gets metadata for current trial or current sampling. Args: key: A string as key to metadata. per_trial: If True, the key is retrieved per curent trial. Otherwise, it is retrieved per current sampling. Returns: A value that can be deserialized by `pg.from_json_str`.
github-repos
def _check_wiremap_validity(self, wire_map, keymap, valmap): for k, v in wire_map.items(): kname = "%s[%d]" % (k[0].name, k[1]) vname = "%s[%d]" % (v[0].name, v[1]) if k not in keymap: raise DAGCircuitError("invalid wire mapping key %s" % kname) ...
Check that the wiremap is consistent. Check that the wiremap refers to valid wires and that those wires have consistent types. Args: wire_map (dict): map from (register,idx) in keymap to (register,idx) in valmap keymap (dict): a map whose keys are wire_map keys valmap (dict): a map whose keys are wire_map values Rai...
juraj-google-style
def word_fts(self, word):
    """Return the featural analysis of each segment in `word`.

    Args:
        word (unicode): one or more IPA segments.

    Returns:
        list: one featural analysis (as produced by ``self.fts``) per
        segment of `word`, in order.
    """
    return [self.fts(segment) for segment in self.segs(word)]
Return featural analysis of `word` Args: word (unicode): one or more IPA segments Returns: list: list of lists (value, feature) tuples where each inner list corresponds to a segment in `word`
codesearchnet
def batch_shape_tensor(self, name='batch_shape_tensor'):
    """Shape of this operator's batch dimensions, determined at runtime.

    If this operator acts like the batch matrix `A` with
    `A.shape = [B1,...,Bb, M, N]`, this returns a `Tensor` holding
    `[B1,...,Bb]`.

    Args:
        name: A name for this `Op`.

    Returns:
        `int32` `Tensor`
    """
    with self._name_scope(name):
        result = self._batch_shape_tensor()
    return result
Shape of batch dimensions of this operator, determined at runtime. If this operator acts like the batch matrix `A` with `A.shape = [B1,...,Bb, M, N]`, then this returns a `Tensor` holding `[B1,...,Bb]`. Args: name: A name for this `Op`. Returns: `int32` `Tensor`
github-repos
def podcast_episodes(self, *, device_id=None):
    """List podcast episodes for all subscribed podcasts.

    Parameters:
        device_id (str, Optional): A mobile device ID. Defaults to the
            ``device_id`` of this :class:`MobileClient` instance.

    Returns:
        list: Podcast episode dicts.
    """
    device = self.device_id if device_id is None else device_id
    episodes = []
    for page in self.podcast_episodes_iter(device_id=device, page_size=49995):
        episodes.extend(page)
    return episodes
Get a listing of podcast episodes for all subscribed podcasts. Parameters: device_id (str, Optional): A mobile device ID. Default: Use ``device_id`` of the :class:`MobileClient` instance. Returns: list: Podcast episode dicts.
codesearchnet
def __init__(self, query):
    """Initialize the QueryMetadata from the given query object.

    Args:
        query: A Query object whose time series and metric type are
            cached on this instance.
    """
    # headers_only=True: only metadata is needed, not the point data.
    self._timeseries_list = list(query.iter(headers_only=True))
    self._metric_type = query.metric_type
Initializes the QueryMetadata given the query object. Args: query: A Query object.
juraj-google-style
class DbrxAttentionConfig(PretrainedConfig): base_config_key = 'attn_config' def __init__(self, attn_pdrop: float=0.0, clip_qkv: Optional[float]=None, kv_n_heads: int=1, rope_theta: float=10000.0, **kwargs: Any): super().__init__(**kwargs) self.attn_pdrop = attn_pdrop self.clip_qkv = cl...
Configuration class for Dbrx Attention. [`DbrxAttention`] class. It is used to instantiate attention layers according to the specified arguments, defining the layers architecture. Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the documentation from [`Pretra...
github-repos
def uniform_row_length(self):
    """Return the length of each row in this partition, if uniform.

    If all rows in this `RowPartition` have the same length, this is
    that length as a scalar integer `Tensor`; otherwise it is `None`.

    Returns:
        scalar Tensor with `type=self.dtype`, or `None`.
    """
    return self._uniform_row_length
Returns the length of each row in this partition, if rows are uniform. If all rows in this `RowPartition` have the same length, then this returns that length as a scalar integer `Tensor`. Otherwise, it returns `None`. Returns: scalar Tensor with `type=self.dtype`, or `None`.
github-repos
def _test_dir(self, test_name): test_dir = os.path.join(self.get_temp_dir(), test_name) if os.path.isdir(test_dir): for f in glob.glob('%s/*' % test_dir): os.remove(f) else: os.makedirs(test_dir) return test_dir
Create an empty dir to use for tests. Args: test_name: Name of the test. Returns: Absolute path to the test directory.
github-repos
def try_claim(self, position):
    """Atomically determine whether a record at a split point is in range.

    This method should be called if and only if the record is at a
    split point. Implementations may update the tracker's last-consumed
    position to ``position``.

    Args:
        position: The position to attempt to claim.

    Raises:
        NotImplementedError: Always; subclasses must override.
    """
    raise NotImplementedError
Atomically determines if a record at a split point is within the range. This method should be called **if and only if** the record is at a split point. This method may modify the internal state of the ``RangeTracker`` by updating the last-consumed position to ``position``. ** Thread safety ** Methods of the class ``...
github-repos
def up_to(self, term: str) -> str:
    """Return the input segment ending at the first occurrence of `term`.

    The parser offset is moved one character past the start of the
    terminator.

    Args:
        term: Terminating string.

    Raises:
        EndOfInput: If `term` does not occur in the rest of the input.
    """
    end = self.input.find(term, self.offset)
    if end < 0:
        raise EndOfInput(self)
    segment = self.input[self.offset:end]
    self.offset = end + 1
    return segment
Parse and return segment terminated by the first occurence of a string. Args: term: Terminating string. Raises: EndOfInput: If `term` does not occur in the rest of the input text.
codesearchnet
def advance_for_next_slice(self, recovery_slice=False):
    """Reset per-slice state and advance to the next slice.

    Args:
        recovery_slice: True if this slice is running recovery logic.
            See handlers.MapperWorkerCallbackHandler.
            _attempt_slice_recovery for more info.
    """
    self.slice_start_time = None
    self.slice_request_id = None
    self.slice_retries = 0
    self.acquired_once = False
    # A recovery slice consumes two ids so the recovery slice itself
    # occupies a distinct id from the re-attempted slice.
    self.slice_id += 2 if recovery_slice else 1
Advance self for next slice. Args: recovery_slice: True if this slice is running recovery logic. See handlers.MapperWorkerCallbackHandler._attempt_slice_recovery for more info.
codesearchnet
def log_every_n(level, msg, n, *args):
    """Log 'msg % args' at level 'level' once per 'n' times.

    Logs the 1st call, (N+1)st call, (2N+1)st call, etc. Not threadsafe.

    Args:
        level: The level at which to log.
        msg: The message to be logged.
        n: The number of calls between logged messages.
        *args: The args to be substituted into the msg.
    """
    count = _GetNextLogCountPerToken(_GetFileAndLine())
    should_log = count % n == 0
    log_if(level, msg, should_log, *args)
Log 'msg % args' at level 'level' once per 'n' times. Logs the 1st call, (N+1)st call, (2N+1)st call, etc. Not threadsafe. Args: level: The level at which to log. msg: The message to be logged. n: The number of times this should be called before it is logged. *args: The args to be substituted into the msg.
juraj-google-style
def GetShadowMap(self, since=None):
    """Return the shadow map from this source.

    Args:
        since: Get only data changed since this timestamp (inclusive),
            or None for all data.

    Returns:
        instance of ShadowMap
    """
    conf = self.conf
    getter = ShadowUpdateGetter(conf)
    return getter.GetUpdates(
        source=self,
        search_base=conf['base'],
        search_filter=conf['filter'],
        search_scope=conf['scope'],
        since=since,
    )
Return the shadow map from this source. Args: since: Get data only changed since this timestamp (inclusive) or None for all data. Returns: instance of ShadowMap
github-repos
def save_component(self, component_name, save_path):
    """Save one component of this model to the designated location.

    Args:
        component_name: The component to save.
        save_path: The location to save to.

    Returns:
        Checkpoint path where the component was saved.

    Raises:
        Whatever ``_validate_savable`` raises for unsavable components.
    """
    component = self.get_component(component_name=component_name)
    # Fail fast before touching the session if the component is not
    # savable.
    self._validate_savable(component=component, component_name=component_name)
    return component.save(sess=self.session, save_path=save_path)
Saves a component of this model to the designated location. Args: component_name: The component to save. save_path: The location to save to. Returns: Checkpoint path where the component was saved.
codesearchnet
def model(x):
    """Feed-forward pass of the model.

    Args:
        x: a (?, 28*28) tensor consisting of the feature inputs for a
            batch of examples.

    Returns:
        A (?, 10) tensor containing the class scores for each example.
    """
    hidden = dense_layer(hidden_weights, x)
    # tf.identity as the activation keeps the output layer linear
    # before the softmax.
    logits = dense_layer(output_weights, hidden, tf.identity)
    return tf.nn.softmax(logits)
Feed forward function of the model. Args: x: a (?, 28*28) tensor consisting of the feature inputs for a batch of examples. Returns: A (?, 10) tensor containing the class scores for each example.
github-repos
def swo_read(self, offset, num_bytes, remove=False):
    """Read data from the SWO buffer.

    Data is left in the SWO buffer unless ``remove`` is ``True``;
    otherwise the caller must discard it explicitly via
    ``.swo_flush()``.

    Args:
        self (JLink): the ``JLink`` instance.
        offset (int): offset of the first byte to retrieve.
        num_bytes (int): number of bytes to read.
        remove (bool): flush the read bytes from the buffer afterwards.

    Returns:
        list: the bytes read from the SWO buffer.
    """
    buf_size = ctypes.c_uint32(num_bytes)
    buf = (ctypes.c_uint8 * num_bytes)(0)
    self._dll.JLINKARM_SWO_Read(buf, offset, ctypes.byref(buf_size))
    # The DLL writes the number of bytes actually read back into
    # buf_size.
    num_read = buf_size.value
    if remove:
        self.swo_flush(num_read)
    return list(buf[:num_read])
Reads data from the SWO buffer. The data read is not automatically removed from the SWO buffer after reading unless ``remove`` is ``True``. Otherwise the callee must explicitly remove the data by calling ``.swo_flush()``. Args: self (JLink): the ``JLink`` instance offset (int): offset of first byte to be retrieved n...
codesearchnet
def sholl_frequency(nrn, neurite_type=NeuriteType.all, step_size=10): nrns = neuron_population(nrn) neurite_filter = is_type(neurite_type) min_soma_edge = float('Inf') max_radii = 0 neurites_list = [] for neuron in nrns: neurites_list.extend(((neurites, neuron.soma.center) for neurites i...
perform Sholl frequency calculations on a population of neurites Args: nrn(morph): nrn or population neurite_type(NeuriteType): which neurites to operate on step_size(float): step size between Sholl radii Note: Given a neuron, the soma center is used for the concentric circles, which range from the soma radii, and th...
codesearchnet
def center_crop(self, image: 'torch.Tensor', size: SizeDict, **kwargs) -> 'torch.Tensor': if size.height is None or size.width is None: raise ValueError(f"The size dictionary must have keys 'height' and 'width'. Got {size.keys()}") image_height, image_width = image.shape[-2:] crop_height, crop_width...
Center crop an image to `(size["height"], size["width"])`. If the input size is smaller than `crop_size` along any edge, the image is padded with 0's and then center cropped. Args: image (`"torch.Tensor"`): Image to center crop. size (`Dict[str, int]`): Size of the output image. Returns: `torch.Tensor`: The center cr...
github-repos
def forward(self, hidden_features): hidden_features = hidden_features.transpose(-1, -2) if self.head_aggregation == 'use_last': hidden_features = hidden_features[..., -1] elif self.head_aggregation == 'max_pool': hidden_features = hidden_features.max(dim=-1).values elif self.head_aggrega...
Args: hidden_features (`torch.Tensor` of shape `(batch_size x num_patch x d_model)` in `flatten` mode or `(batch_size x n_vars x num_patch x d_model)` in `common_channel`/`mix_channel` mode.): Input hidden features. Returns: `torch.Tensor` of shape `(batch_size x num_targets)`.
github-repos
def process_rule(edges: Edges, ast: Function, rule: Mapping[(str, Any)], spec: BELSpec): ast_type = ast.__class__.__name__ trigger_functions = rule.get('trigger_function', []) trigger_types = rule.get('trigger_type', []) rule_subject = rule.get('subject') rule_relation = rule.get('relation') rul...
Process computed edge rule Recursively processes BELAst versus a single computed edge rule Args: edges (List[Tuple[Union[Function, str], str, Function]]): BEL Edge ASTs ast (Function): BEL Function AST rule (Mapping[str, Any]: computed edge rule
codesearchnet
def load_state(self, in_path):
    """Load this emulated object's state from a saved-state file.

    The file should have been produced by a previous call to
    save_state.

    Args:
        in_path (str): Path to the saved state dump to load.
    """
    with open(in_path, 'r') as infile:
        saved = json.load(infile)
    self.restore_state(saved)
Load the current state of this emulated object from a file. The file should have been produced by a previous call to save_state. Args: in_path (str): The path to the saved state dump that you wish to load.
codesearchnet
def apply_op(input_layer, operation, *op_args, **op_kwargs):
    """Apply `operation` to the layer's tensor without adding summaries.

    Args:
        input_layer: The input layer for this op.
        operation: An operation that takes a tensor and the supplied
            args.
        *op_args: Extra positional arguments for `operation`.
        **op_kwargs: Keyword arguments for `operation`.

    Returns:
        A new layer wrapping the operation's result tensor.
    """
    result_tensor = operation(input_layer.tensor, *op_args, **op_kwargs)
    return input_layer.with_tensor(result_tensor)
Applies the given operation to this before without adding any summaries. Args: input_layer: The input layer for this op. operation: An operation that takes a tensor and the supplied args. *op_args: Extra arguments for operation. **op_kwargs: Keyword arguments for the operation. Returns: A new layer with operation appl...
juraj-google-style
def convert_to_rgb(image: ImageInput) -> ImageInput: if not isinstance(image, PIL.Image.Image): return image if image.mode == 'RGB': return image image_rgba = image.convert('RGBA') background = Image.new('RGBA', image_rgba.size, (255, 255, 255)) alpha_composite = Image.alpha_composit...
Converts an image to RGB format. Only converts if the image is of type PIL.Image.Image, otherwise returns the image as is. Args: image (Image): The image to convert.
github-repos
def add(self, method_mask, path, func):
    """Append a middleware function to the chain.

    The stored entry is yielded when iterating over the chain with a
    matching method and path.

    Args:
        method_mask (growler.http.HTTPMethod): Bitwise mask matched against
            specific request methods.
        path (str or regex): Object compared against request urls.
        func (callable): The middleware function itself. A 3-parameter
            callable is treated as an error handler; a MiddlewareChain is
            treated as a subchain.
    """
    # Error handlers are identified purely by arity (3 parameters).
    takes_error = len(signature(func).parameters) == 3
    node = MiddlewareNode(
        func=func,
        mask=method_mask,
        path=path,
        is_errorhandler=takes_error,
        is_subchain=isinstance(func, MiddlewareChain),
    )
    self.mw_list.append(node)
Add a function to the middleware chain. This function is returned when iterating over the chain with matching method and path. Args: method_mask (growler.http.HTTPMethod): A bitwise mask intended to match specific request methods. path (str or regex): An object with which to compare request urls func (callable): The f...
codesearchnet
def get_float(self, min_float=_MIN_FLOAT, max_float=_MAX_FLOAT):
    """Consume a float in ``[min_float, max_float]`` from the fuzz input.

    Args:
        min_float: Minimum allowed value (defaults to module-level _MIN_FLOAT).
        max_float: Maximum allowed value (defaults to module-level _MAX_FLOAT).

    Returns:
        float: Value derived from the fuzz data provider's input bytes.
    """
    # NOTE(review): self.fdp appears to be a FuzzedDataProvider — confirm.
    return self.fdp.ConsumeFloatInRange(min_float, max_float)
Consume a float with given constraints. Args: min_float: Minimum allowed float. max_float: Maximum allowed float. Returns: Consumed float based on input bytes and constraints.
github-repos
def _unary_op(cls, x: 'TensorFluent', op: Callable[[tf.Tensor], tf.Tensor], dtype: tf.DType) -> 'TensorFluent':
    """Return a TensorFluent for the unary ``op`` applied to fluent ``x``.

    Args:
        x: The input fluent.
        op: The unary operation.
        dtype: The output's data type.

    Returns:
        A TensorFluent wrapping the unary operator's output.
    """
    # Cast first so the operation runs on the requested dtype.
    casted = x.cast(dtype)
    result = op(casted.tensor)
    return TensorFluent(result, casted.scope.as_list(), batch=casted.batch)
Returns a TensorFluent for the unary `op` applied to fluent `x`. Args: x: The input fluent. op: The unary operation. dtype: The output's data type. Returns: A TensorFluent wrapping the unary operator's output.
juraj-google-style
def bleu_score(predictions, labels, **unused_kwargs): outputs = tf.to_int32(tf.argmax(predictions, axis=(- 1))) outputs = tf.squeeze(outputs, axis=[(- 1), (- 2)]) labels = tf.squeeze(labels, axis=[(- 1), (- 2)]) bleu = tf.py_func(compute_bleu, (labels, outputs), tf.float32) return (bleu, tf.constant...
BLEU score computation between labels and predictions. An approximate BLEU scoring method since we do not glue word pieces or decode the ids and tokenize the output. By default, we use ngram order of 4 and use brevity penalty. Also, this does not have beam search. Args: predictions: tensor, model predictions labels: ...
codesearchnet
def _ParseFileData(self, knowledge_base, file_object): text_file_object = dfvfs_text_file.TextFile(file_object, encoding='utf-8') product_values = {} for line in text_file_object.readlines(): line = line.strip() if line.startswith(' continue key, value = line.split('=') ...
Parses file content (data) for system product preprocessing attribute. Args: knowledge_base (KnowledgeBase): to fill with preprocessing information. file_object (dfvfs.FileIO): file-like object that contains the artifact value data. Raises: errors.PreProcessFail: if the preprocessing fails.
juraj-google-style
def end_at(self, document_fields):
    """End query results at, and including, a particular document value.

    Overwrites any end cursor previously set on this query.

    Args:
        document_fields: Field values identifying the last document to
            include in the result set.

    Returns:
        The query object produced by the shared cursor helper, with an
        inclusive (``before=False``) end (``start=False``) cursor attached.
    """
    return self._cursor_helper(document_fields, before=False, start=False)
End query results at a particular document value. The result set will **include** the document specified by ``document_fields``. If the current query already has specified an end cursor -- either via this method or :meth:`~.firestore_v1beta1.query.Query.end_before` -- this will overwrite it. When the query is sent t...
codesearchnet
def has_option(self, section, option):
    """Return whether ``option`` exists in ``section``.

    The option name is normalized with ``optionxform`` before the lookup,
    mirroring configparser semantics.

    Args:
        section (str): Name of the section.
        option (str): Name of the option.

    Returns:
        bool: Whether the option exists in the given section.
    """
    if section not in self.sections():
        return False
    return self.optionxform(option) in self[section]
Checks for the existence of a given option in a given section. Args: section (str): name of section option (str): name of option Returns: bool: whether the option exists in the given section
juraj-google-style
def publish(cls, message, client_filter=None):
    """Send ``message`` to every matching subscriber.

    Args:
        message: The message delivered via each client's ``send``.
        client_filter: Optional predicate called with each client; when
            supplied, only clients for which it returns True receive the
            message.
    """
    with cls._lock:
        # A falsy filter means "deliver to everyone".
        recipients = (
            c for c in cls.subscribers
            if not client_filter or client_filter(c)
        )
        for recipient in recipients:
            recipient.send(message)
Publish messages to subscribers. Args: message: The message to publish. client_filter: A filter function to call passing in each client. Only clients for whom the function returns True will have the message sent to them.
juraj-google-style
def time_series(timefile, colnames): if (not timefile.is_file()): return None data = pd.read_csv(timefile, delim_whitespace=True, dtype=str, header=None, skiprows=1, index_col=0, engine='c', memory_map=True, error_bad_lines=False, warn_bad_lines=False) data = data.apply(pd.to_numeric, raw=True, erro...
Read temporal series text file. If :data:`colnames` is too long, it will be truncated. If it is too short, additional numeric column names from 0 to N-1 will be attributed to the N extra columns present in :data:`timefile`. Args: timefile (:class:`pathlib.Path`): path of the time.dat file. colnames (list of names): n...
codesearchnet
async def verify_worker_impls(chain): valid_worker_impls = get_valid_worker_impls() for obj in chain.get_all_links_in_chain(): worker_impl = obj.worker_impl log.info("Verifying {} {} as a {} task...".format(obj.name, obj.task_id, worker_impl)) await valid_worker_im...
Verify the task type (e.g. decision, build) of each link in the chain. Args: chain (ChainOfTrust): the chain we're operating on Raises: CoTError: on failure
juraj-google-style
def get_group_by_id(self, group_id: str) -> typing.Optional['Group']:
    """Return the group whose id equals ``group_id``, or None.

    Args:
        group_id: Group id; validated as a positive integer value.

    Returns:
        The matching Group, or None when no group has that id.
    """
    VALID_POSITIVE_INT.validate(group_id, 'get_group_by_id', exc=ValueError)
    return next((g for g in self.groups if g.group_id == group_id), None)
Gets a group by id Args: group_id: group id Returns: Group, or None if no group with the given id exists
juraj-google-style
def reset(self, indices=None): if indices is None: indices = np.arange(len(self._envs)) if self._blocking: observs = [self._envs[index].reset() for index in indices] else: observs = [self._envs[index].reset(blocking=False) for index in indices] observs = [observ() for observ in ...
Reset the environment and convert the resulting observation. Args: indices: The batch indices of environments to reset; defaults to all. Returns: Batch of observations.
juraj-google-style
def _try_load_par_source(source_file_path): prefix_path = source_file_path while True: prefix_path, basename = os.path.split(prefix_path) if not basename: break suffix_path = os.path.normpath(os.path.relpath(source_file_path, start=prefix_path)) if prefix_path.endswit...
Try loading the source code inside a .par file. A .par file is a zip-compressed, self-contained Python executable. It contains the content of individual Python source files that can be read only through extracting from the zip file. Args: source_file_path: The full path to the file inside the .par file. This path sho...
github-repos
def sg_summary_activation(tensor, prefix=None, name=None): r prefix = '' if prefix is None else prefix + '/' name = prefix + _pretty_name(tensor) if name is None else prefix + name _scalar(name + '/ratio', tf.reduce_mean(tf.cast(tf.greater(tensor, 0), tf.sg_floatx))) _hist...
r"""Register `tensor` to summary report as `activation` Args: tensor: A `Tensor` to log as activation prefix: A `string`. A prefix to display in the tensor board web UI. name: A `string`. A name to display in the tensor board web UI. Returns: None
juraj-google-style
def query_foursquare(point, max_distance, client_id, client_secret): if (not client_id): return [] if (not client_secret): return [] if from_cache(FS_CACHE, point, max_distance): return from_cache(FS_CACHE, point, max_distance) url = (FOURSQUARE_URL % (client_id, client_secret, p...
Queries Foursquare API for a location Args: point (:obj:`Point`): Point location to query max_distance (float): Search radius, in meters client_id (str): Valid Foursquare client id client_secret (str): Valid Foursquare client secret Returns: :obj:`list` of :obj:`dict`: List of locations with the following format: { '...
codesearchnet
def _load_from_file(path): config = [] try: with open(path, 'r') as config_file: config = yaml.load(config_file)['normalizations'] except EnvironmentError as e: raise ConfigError('Problem while loading file: %s' % e....
Load a config file from the given path. Load all normalizations from the config file received as argument. It expects to find a YAML file with a list of normalizations and arguments under the key 'normalizations'. Args: path: Path to YAML file.
juraj-google-style
async def pipe_to_log(pipe, filehandles=(), level=logging.INFO): while True: line = await pipe.readline() if line: line = to_unicode(line) log.log(level, line.rstrip()) for filehandle in filehandles: print(line, file=filehandle, end="") ...
Log from a subprocess PIPE. Args: pipe (filehandle): subprocess process STDOUT or STDERR filehandles (list of filehandles, optional): the filehandle(s) to write to. If empty, don't write to a separate file. Defaults to (). level (int, optional): the level to log to. Defaults to ``logging.INFO``.
juraj-google-style
def compute_number_edges(function):
    """Compute the number of edges of the CFG.

    Each node contributes one edge per successor (``sons``).

    Args:
        function (core.declarations.function.Function): Function whose CFG
            edges are counted.

    Returns:
        int: Total number of edges.
    """
    return sum(len(node.sons) for node in function.nodes)
Compute the number of edges of the CFG Args: function (core.declarations.function.Function) Returns: int
juraj-google-style
def loads(s, model):
    """Deserialize PENMAN graphs from a string.

    Args:
        s (str): Serialized PENMAN graphs.
        model: Xmrs subclass instantiated from decoded triples.

    Returns:
        list: One ``model`` instance per graph decoded from ``s``.
    """
    return [
        model.from_triples(graph.triples())
        for graph in penman.loads(s, cls=XMRSCodec)
    ]
Deserialize PENMAN graphs from a string Args: s (str): serialized PENMAN graphs model: Xmrs subclass instantiated from decoded triples Returns: a list of objects (of class *model*)
juraj-google-style
def _HandleMetadataUpdate( self, metadata_key='', recursive=True, wait=True, timeout=None, retry=True): exception = None while True: try: return self._GetMetadataUpdate( metadata_key=metadata_key, recursive=recursive, wait=wait, timeout=timeout) excep...
Wait for a successful metadata response. Args: metadata_key: string, the metadata key to watch for changes. recursive: bool, True if we should recursively watch for metadata changes. wait: bool, True if we should wait for a metadata change. timeout: int, timeout in seconds for returning metadata output. retry: bool, T...
juraj-google-style
def pretty_print_config_to_json(self, configs):
    """Return the directory list document for ``configs`` as JSON text.

    Args:
        configs: Either a single dict or a list of dicts containing the
            service configurations to list.

    Returns:
        str: The directory list document, pretty-printed as JSON.
    """
    directory_doc = self.get_directory_list_doc(configs)
    return json.dumps(
        directory_doc, sort_keys=True, indent=2, separators=(',', ': '))
JSON string description of a protorpc.remote.Service in a discovery doc. Args: configs: Either a single dict or a list of dicts containing the service configurations to list. Returns: string, The directory list document as a JSON string.
codesearchnet
def resource_path(relative_path=None, expect=None): if (expect not in (None, 'file', 'folder')): raise ArgumentError("Invalid expect parameter, must be None, 'file' or 'folder'", expect=expect) this_dir = os.path.dirname(__file__) _resource_path = os.path.join(this_dir, '..', 'config') if (relat...
Return the absolute path to a resource in iotile-build. This method finds the path to the `config` folder inside iotile-build, appends `relative_path` to it and then checks to make sure the desired file or directory exists. You can specify expect=(None, 'file', or 'folder') for what you expect to find at the given pa...
codesearchnet
def forceSetSlaac(self, slaacAddress): print '%s call forceSetSlaac' % self.port print slaacAddress try: cmd = 'ipaddr add %s' % str(slaacAddress) print cmd return self.__sendCommand(cmd)[0] == 'Done' except Exception, e: ModuleHel...
force to set a slaac IPv6 address to Thread interface Args: slaacAddress: a slaac IPv6 address to be set Returns: True: successful to set slaac address to Thread interface False: fail to set slaac address to Thread interface
juraj-google-style
def _matrix_conv(self, m1, m2): n = m1[0, 0].shape.as_list()[0] if n != m2[0, 0].shape.as_list()[0]: raise ValueError(f'The entries in matrices m1 and m2 must have the same dimensions. Received m1[0, 0].shape={m1[0, 0].shape} and m2[0, 0].shape={m2[0, 0].shape}.') k = int(np.sqrt(len(m1))) l = i...
Matrix convolution. Args: m1: A k x k dictionary, each element is a n x n matrix. m2: A l x l dictionary, each element is a n x n matrix. Returns: (k + l - 1) * (k + l - 1) dictionary each element is a n x n matrix. Raises: ValueError: if the entries of m1 and m2 are of different dimensions.
github-repos
def _EnforceShapeInvariant(merge_var, next_var): if isinstance(merge_var, tensor_lib.Tensor): m_shape = merge_var.get_shape() n_shape = next_var.get_shape() if not _ShapeLessThanOrEqual(n_shape, m_shape): enter = merge_var.op.inputs[0].op assert util.IsLoopEnter(enter...
Check if the shapes of the loops variables are invariants. Args: merge_var: The tensor representing the initial values of the loop variables. next_var: The tensor representing the values of the loop variables after one loop iteration. Raises: ValueError: If any tensor in `merge_var` has a more specific shape than its...
github-repos
def process(self, element):
    """Tokenize raw text for T5ForConditionalGeneration inference.

    Args:
        element: A string of input text.

    Returns:
        The tokenized ``input_ids``, padded to a fixed length of 512.
    """
    encoded = self._tokenizer(
        element, return_tensors='pt', padding='max_length', max_length=512)
    return encoded.input_ids
Process the raw text input to a format suitable for T5ForConditionalGeneration model inference Args: element: A string of text Returns: A tokenized example that can be read by the T5ForConditionalGeneration
github-repos
def __init__(self, token): r self.base_url = 'http: self.token = token self.geo_criteria = ['stid', 'state', 'country', 'county', 'radius', 'bbox', 'cwa', 'nwsfirezone', 'gacc', 'subgacc']
r""" Instantiates an instance of MesoPy. Arguments: ---------- token: string, mandatory Your API token that authenticates you for requests against MesoWest.mes Returns: -------- None. Raises: ------- None.
juraj-google-style
def all_logging_disabled(highest_level=logging.CRITICAL):
    """Temporarily suppress all logging.

    While the managed body runs, no logging messages at or below
    ``highest_level`` are processed.

    Args:
        highest_level: The maximum logging level being blocked.
    """
    saved_disable_level = logging.root.manager.disable
    logging.disable(highest_level)
    try:
        yield
    finally:
        # Restore whatever disable threshold was in effect before entry.
        logging.disable(saved_disable_level)
Disable all logging temporarily. A context manager that will prevent any logging messages triggered during the body from being processed. Args: highest_level: the maximum logging level that is being blocked
codesearchnet
def __init__(self, tid=None, stdout=None, stderr=None):
    """Initialize the AppFuture.

    Args:
        tid (int): Task id; any unique identifier. Default: None.
        stdout (str): Stdout file of the app. Default: None.
        stderr (str): Stderr file of the app. Default: None.
    """
    self._tid = tid
    # Initialize the base Future before attaching any further state.
    super().__init__()
    self.parent = None
    # Guards concurrent updates to this future's internal state.
    self._update_lock = threading.Lock()
    self._outputs = []
    self._stdout = stdout
    self._stderr = stderr
Initialize the AppFuture. Args: KWargs: - tid (int) : Task id; any unique identifier (currently an int). Default: None - stdout (str) : Stdout file of the app. Default: None - stderr (str) : Stderr file of the app. Default: None
juraj-google-style