code
stringlengths
20
4.93k
docstring
stringlengths
33
1.27k
source
stringclasses
3 values
def getitem_slot(self, node: cfg.CFGNode, index_var: cfg.Variable) -> tuple[cfg.CFGNode, cfg.Variable]: results = [] unresolved = False node, ret = self.call_pytd(node, '__getitem__', index_var) if self.is_concrete: for val in index_var.bindings: try: index = self.ctx...
Implements __getitem__ for List. Arguments: node: The current CFG node. index_var: The Variable containing the index value, the i in lst[i]. Returns: Tuple of (node, return_variable). node may be the same as the argument. return_variable is a Variable with bindings of the possible return values.
github-repos
def from_millis(cls, timeout_ms):
    """Return a PolledTimeout for *timeout_ms*, creating one only if needed.

    Args:
        cls: The PolledTimeout class (used as a classmethod).
        timeout_ms: Either an existing PolledTimeout-like object (anything
            exposing a ``has_expired`` attribute, returned unchanged), None
            for no timeout, or a number of milliseconds.

    Returns:
        A timeout object constructed from ``cls``, or ``timeout_ms`` itself
        when it already looks like a PolledTimeout.
    """
    # Duck-typed check: anything with 'has_expired' is treated as already
    # being a timeout object and is passed through untouched.
    if hasattr(timeout_ms, 'has_expired'):
        return timeout_ms
    seconds = None if timeout_ms is None else timeout_ms / 1000.0
    return cls(seconds)
Create a new PolledTimeout if needed. If timeout_ms is already a PolledTimeout, just return it, otherwise create a new PolledTimeout with the given timeout in milliseconds. Args: timeout_ms: PolledTimeout object, or number of milliseconds to use for creating a new one. Returns: A PolledTimeout object that will expir...
juraj-google-style
def __deepcopy__(self, memo):
    """Deep-copy this `AggregatingVariable`, keeping strategy and devices.

    Unlike deepcopy of a plain variable, the copy is created under the
    original distribution strategy so it keeps the same device placement
    and aggregation mode.

    Args:
        memo: Standard deepcopy memo dict mapping id() -> copied object.

    Returns:
        A new instance of the same class wrapping a deep copy of the
        underlying variable.
    """
    # Enter the original strategy scope so the wrapped variable is copied
    # with the same placement semantics it was created under.
    with distribute_lib.enter_or_assert_strategy(self._distribute_strategy):
        inner_copy = copy.deepcopy(self._v, memo)
    duplicate = type(self)(
        strategy=self._distribute_strategy,
        v=inner_copy,
        aggregation=self._aggregation,
    )
    # Record in the memo so cyclic references resolve to this copy.
    memo[id(self)] = duplicate
    return duplicate
Perform a deepcopy of the `AggregatingVariable`. Unlike the deepcopy of a regular tf.Variable, this keeps the original strategy and devices of the `AggregatingVariable`. To avoid confusion with the behavior of deepcopy on a regular `Variable` (which does copy into new devices), we only allow a deepcopy of a `Aggregat...
github-repos
def make_mapper(features):
    """Build a DataFrameMapper from a feature or a list of features.

    Args:
        features (Union[Feature, List[Feature]]): A single feature, a list
            of features, or a falsy value (replaced by a no-op feature).

    Returns:
        DataFrameMapper: Mapper constructed from the features' input
        transformer tuples, operating directly on DataFrames.
    """
    # A falsy argument is replaced with a do-nothing feature so the mapper
    # is always well-formed.
    if not features:
        features = Feature(input=[], transformer=NullTransformer())
    # Normalize a single feature into a one-element tuple.
    if not iterable(features):
        features = (features,)
    transformer_tuples = [
        feature.as_input_transformer_tuple() for feature in features
    ]
    return DataFrameMapper(transformer_tuples, input_df=True)
Make a DataFrameMapper from a feature or list of features Args: features (Union[Feature, List[Feature]]): feature or list of features Returns: DataFrameMapper: mapper made from features
codesearchnet
def compute_classification_results(self, adv_batches, dataset_batches, dataset_meta, defense_work=None): class_batch_to_work = {} if defense_work: for v in itervalues(defense_work.work): class_batch_to_work[v['output_classification_batch_id']] = v accuracy_matrix = ResultMatrix() err...
Computes classification results. Args: adv_batches: instance of AdversarialBatches dataset_batches: instance of DatasetBatches dataset_meta: instance of DatasetMetadata defense_work: instance of DefenseWorkPieces Returns: accuracy_matrix, error_matrix, hit_target_class_matrix, processed_images_count
codesearchnet
def index_bgen(fn, legacy=False): logger.info("Indexing {} (BGEN) using 'bgenix'{}".format(fn, (' (legacy mode)' if legacy else ''))) command = ['bgenix', '-g', fn, '-index'] if legacy: command.append('-with-rowid') try: logger.info("Executing '{}'".format(' '.join(command))) sub...
Indexes a BGEN file. Args: fn (str): The name of the BGEN file.
codesearchnet
def tensor_dimension_to_mesh_axis(self, tensor_dimension, mesh_shape): val = [i for i, mesh_dimension in enumerate(mesh_shape) if (tensor_dimension.name, mesh_dimension.name) in self._pairs] if len(val) > 1: raise ValueError( "Tensor dimension maps to multiple mesh dimensions" ...
Mesh axis associated with tensor dimension (or None). Args: tensor_dimension: Dimension. mesh_shape: Shape. Returns: Integer or None. Raises: ValueError: If one Tensor dimension maps to two mesh dimensions.
juraj-google-style
def _term(self, term):
    """Append a term to the query's "q" field.

    Arguments:
        term (str): The term to add; coerced to str. Empty terms are
            ignored.

    Returns:
        SearchHelper: Self, to allow call chaining.
    """
    text = str(term)
    # Skip empty terms so the query string is not modified needlessly.
    if text:
        self.__query["q"] += text
    return self
Add a term to the query. Arguments: term (str): The term to add. Returns: SearchHelper: Self
juraj-google-style
def sigmoid(x):
    """Element-wise sigmoid.

    Args:
        x: A tensor or variable.

    Returns:
        A tensor.
    """
    # Thin delegate to the backend nn implementation.
    return nn.sigmoid(x)
Element-wise sigmoid. Args: x: A tensor or variable. Returns: A tensor.
github-repos
def launch_minecraft(port, installdir="MalmoPlatform", replaceable=False): launch_script = './launchClient.sh' if os.name == 'nt': launch_script = 'launchClient.bat' cwd = os.getcwd() os.chdir(installdir) os.chdir("Minecraft") try: cmd = [launch_script, '-port', str(port), '...
Launch Minecraft listening for malmoenv connections. Args: port: the TCP port to listen on. installdir: the install dir name. Defaults to MalmoPlatform. Must be same as given (or defaulted) in download call if used. replaceable: whether or not to automatically restart Minecraft (default is false).
juraj-google-style
def get_parent(self, path):
    """Fetch the parent entity of the entity pointed to by *path*.

    Args:
        path (str): Storage path of the entity whose parent is needed.

    Returns:
        A JSON object of the parent entity, if found.

    Raises:
        StorageArgumentException: Invalid arguments.
        StorageForbiddenException: Server response code 403.
        StorageNotFoundException: Server response code 404.
    """
    self.__validate_storage_path(path, projects_allowed=False)
    # Split into non-empty components, then drop the final one (the entity
    # itself) to obtain its parent's path.  An IndexError propagates when
    # the path has no components, as in the original.
    segments = [piece for piece in path.split('/') if piece]
    del segments[-1]
    parent_path = '/' + '/'.join(segments)
    return self.api_client.get_entity_by_query(path=parent_path)
Get the parent entity of the entity pointed by the given path. Args: path (str): The path of the entity whose parent is needed Returns: A JSON object of the parent entity if found. Raises: StorageArgumentException: Invalid arguments StorageForbiddenException: Server response code 403 StorageNotFoundException: Server...
codesearchnet
def _WriteCacheFile(self, cache_filename, scopes): creds = {'scopes': sorted(list(scopes)), 'svc_acct_name': self.__service_account_name} creds_str = json.dumps(creds) cache_file = _MultiProcessCacheFile(cache_filename) try: cache_file.LockedWrite(creds_str) except KeyboardInterrupt: ...
Writes the credential metadata to the cache file. This does not save the credentials themselves (CredentialStore class optionally handles that after this class is initialized). Args: cache_filename: Cache filename to check. scopes: Scopes for the desired credentials.
codesearchnet
def write_value(self, ostream, kmip_version=enums.KMIPVersion.KMIP_1_0):
    """Write the value of this Boolean object to the output stream.

    Args:
        ostream (Stream): Buffer that receives the encoded bytes; usually a
            BytearrayStream object. Required.
        kmip_version (KMIPVersion): KMIP version used for encoding.
            Optional, defaults to KMIP 1.0.
    """
    try:
        # Booleans are encoded as an unsigned 8-byte big-endian integer.
        encoded = pack('!Q', self.value)
        ostream.write(encoded)
    except Exception:
        # Log for diagnostics, then let the caller see the failure.
        self.logger.error('Error writing boolean value to buffer')
        raise
Write the value of the Boolean object to the output stream. Args: ostream (Stream): A buffer to contain the encoded bytes of the value of a Boolean object. Usually a BytearrayStream object. Required. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be encoded. Optional, d...
codesearchnet
def do_test(create_module_fn, exported_names=None, show_debug_info=False): if exported_names is None: exported_names = [] logging.set_stderrthreshold('error') tf.enable_v2_behavior() def app_main(argv): if len(argv) > 1: raise app.UsageError('Too many command-line a...
Runs test. 1. Performs absl and tf "main"-like initialization that must run before almost anything else. 2. Converts `tf.Module` to SavedModel 3. Converts SavedModel to MLIR 4. Prints the textual MLIR to stdout (it is expected that the caller will have FileCheck checks in its file to check this output). This is only ...
github-repos
def block_diag(*blocks: np.ndarray) -> np.ndarray: for b in blocks: if (b.shape[0] != b.shape[1]): raise ValueError('Blocks must be square.') if (not blocks): return np.zeros((0, 0), dtype=np.complex128) n = sum((b.shape[0] for b in blocks)) dtype = functools.reduce(_merge_dt...
Concatenates blocks into a block diagonal matrix. Args: *blocks: Square matrices to place along the diagonal of the result. Returns: A block diagonal matrix with the given blocks along its diagonal. Raises: ValueError: A block isn't square.
codesearchnet
def _WriteAttributeContainer(self, attribute_container): if (attribute_container.CONTAINER_TYPE == self._CONTAINER_TYPE_EVENT): (timestamp, serialized_data) = self._serialized_event_heap.PopEvent() else: serialized_data = self._SerializeAttributeContainer(attribute_container) if (self.compre...
Writes an attribute container. The table for the container type must exist. Args: attribute_container (AttributeContainer): attribute container.
codesearchnet
def get_available_versions(self, project_name): available_versions = self.pypi_client.package_releases(project_name) if not available_versions: available_versions = self.pypi_client.package_releases( project_name.capitalize() ) return d...
Query PyPI to see if package has any available versions. Args: project_name (str): The name the project on PyPI. Returns: dict: Where keys are tuples of parsed versions and values are the versions returned by PyPI.
juraj-google-style
def contains(self, key):
    """Return whether the object named by `key` exists.

    Optimized to check only that the backing file object exists.

    Args:
        key: Key naming the object to check.

    Returns:
        bool: True if the backing path exists and is a regular file.
    """
    target = self.object_path(key)
    # Mirror the original exists-then-isfile check exactly.
    if not os.path.exists(target):
        return False
    return os.path.isfile(target)
Returns whether the object named by `key` exists. Optimized to only check whether the file object exists. Args: key: Key naming the object to check. Returns: boolean indicating whether the object exists
juraj-google-style
def scalar_input_map(func, input_):
    """Apply *func* to a scalar, or to every element of an iterable.

    Args:
        func: Function to apply.
        input_: Either an iterable or a scalar value.

    Returns:
        ``[func(x) for x in input_]`` when ``input_`` is iterable,
        otherwise the single value ``func(input_)``.
    """
    if not util_iter.isiterable(input_):
        return func(input_)
    # Materialize as a list to match the eager behavior of list(map(...)).
    return [func(item) for item in input_]
Map like function Args: func: function to apply input_ : either an iterable or scalar value Returns: If ``input_`` is iterable this function behaves like map otherwise applies func to ``input_``
codesearchnet
def load_types_for_deserialization(cls, *types_to_deserialize: Type[Any]) -> ContextManager[Dict[str, Type[Any]]]:
    """Context manager that temporarily registers types for deserialization.

    Args:
        *types_to_deserialize: Types (normally unregistered) to make
            available while deserializing.

    Returns:
        A context manager yielding the mapping of type names to types.
    """
    # Delegate to the shared registry; the registry owns the actual
    # context-manager machinery.
    registry = cls._TYPE_REGISTRY
    return registry.load_types_for_deserialization(*types_to_deserialize)
Context manager for loading unregistered types for deserialization. Example:: class A(pg.Object): auto_register = False x: int class B(A): y: str with pg.JSONConvertile.load_types_for_deserialization(A, B): pg.from_json_str(A(1).to_json_str()) pg.from_json_str(B(1, 'hi').to_json_str()) Args: *types_to_deserialize: ...
github-repos
def dataframe(start_row=0, max_rows=None, use_cache=True):
    """Construct a query output object whose result is a dataframe.

    Args:
        start_row: Row of the table at which to start the export
            (default 0).
        max_rows: Upper limit on the number of rows to export
            (default None).
        use_cache: Whether to use cached results or not (default True).

    Returns:
        A QueryOutput configured to produce a dataframe.
    """
    result = QueryOutput()
    result._output_type = 'dataframe'
    result._dataframe_start_row = start_row
    result._dataframe_max_rows = max_rows
    result._use_cache = use_cache
    return result
Construct a query output object where the result is a dataframe Args: start_row: the row of the table at which to start the export (default 0). max_rows: an upper limit on the number of rows to export (default None). use_cache: whether to use cached results or not (default True).
juraj-google-style
def docx_table_from_xml_node(table_node: ElementTree.Element, level: int, config: TextProcessingConfig) -> str: table = CustomDocxTable() for row_node in table_node: if row_node.tag != DOCX_TABLE_ROW: continue table.new_r...
Converts an XML node representing a DOCX table into a textual representation. Args: table_node: XML node level: current level in XML hierarchy (used for recursion; start level is 0) config: :class:`TextProcessingConfig` control object Returns: string representation
juraj-google-style
def mix_over_posterior_draws(means, variances): with tf.compat.v1.name_scope( 'mix_over_posterior_draws', values=[means, variances]): num_posterior_draws = dist_util.prefer_static_value( tf.shape(input=means))[0] component_observations = tfd.Independent( ...
Construct a predictive normal distribution that mixes over posterior draws. Args: means: float `Tensor` of shape `[num_posterior_draws, ..., num_timesteps]`. variances: float `Tensor` of shape `[num_posterior_draws, ..., num_timesteps]`. Returns: mixture_dist: `tfd.MixtureSameFamily(tfd.Independent(tfd.Normal))` inst...
juraj-google-style
class AssertEqual(beam.PTransform):
    """Assert that the input contains exactly the elements provided.

    Primarily used for testing: the entire pipeline fails if the input to
    this transform is not exactly the set of `elements` given in the config
    parameter. As with Create, YAML/JSON-style mappings are interpreted as
    Beam rows.
    """

    def __init__(self, elements: Iterable[Any]):
        # Expected elements, compared against the input when expanded.
        self._elements = elements

    def expand(self, pcoll):
        # Normalize each incoming row to a plain beam.Row before comparing
        # against the expected elements (also converted to rows).
        as_rows = pcoll | beam.Map(lambda row: beam.Row(**row._asdict()))
        expected = equal_to(dicts_to_rows(self._elements))
        return assert_that(as_rows, expected)
Asserts that the input contains exactly the elements provided. This is primarily used for testing; it will cause the entire pipeline to fail if the input to this transform is not exactly the set of `elements` given in the config parameter. As with Create, YAML/JSON-style mappings are interpreted as Beam rows, e.g.:: ...
github-repos
def write(self, __text: str) -> None: if __text == os.linesep: self.handle.write(__text) else: frame = inspect.currentframe() if frame is None: filename = 'unknown' lineno = 0 else: outer = frame.f_b...
Write text to the debug stream. Args: __text: Text to write
juraj-google-style
def _replace_image(image_url, image_tag, ebook_folder, image_name=None): try: assert isinstance(image_tag, bs4.element.Tag) except AssertionError: raise TypeError(('image_tag cannot be of type ' + str(type(image_tag)))) if (image_name is None): image_name = str(uuid.uuid4()) try:...
Replaces the src of an image to link to the local copy in the images folder of the ebook. Tightly coupled with bs4 package. Args: image_url (str): The url of the image. image_tag (bs4.element.Tag): The bs4 tag containing the image. ebook_folder (str): The directory where the ebook files are being saved. This must cont...
codesearchnet
def _get_resource_list(self, rsrc_dict): if 'collections' in rsrc_dict: return rsrc_dict['collections'] if 'experiments' in rsrc_dict: return rsrc_dict['experiments'] if 'channels' in rsrc_dict: return rsrc_dict['channels'] if 'coords' in rsrc...
Extracts list of resources from the HTTP response. Args: rsrc_dict (dict): HTTP response encoded in a dictionary. Returns: (list[string]): List of a type of resource (collections, experiments, etc). Raises: (RuntimeError): If rsrc_dict does not contain any known resources.
juraj-google-style
def load_pip_addons(_globals):
    """Load every known fabsetup addon installed as a pypi pip-package.

    Args:
        _globals (dict): The globals() namespace of the fabric script.

    Returns:
        None
    """
    for pkg in known_pip_addons:
        _, owner = package_username(pkg)
        module_name = pkg.replace('-', '_')
        try:
            load_addon(owner, module_name, _globals)
        except ImportError:
            # Addon is not installed; skip it silently (best effort).
            pass
Load all known fabsetup addons which are installed as pypi pip-packages. Args: _globals(dict): the globals() namespace of the fabric script. Return: None
codesearchnet
def egress(self, envelope, http_headers, operation, binding_options): if self._logger.isEnabledFor(logging.INFO): service_name = operation.binding.wsdl.services.keys()[0] self._logger.info(_REQUEST_LOG_LINE, service_name, operation.name, binding_options['address']) if self._logger.isEnabledFor(l...
Overrides the egress function for request logging. Args: envelope: An Element with the SOAP request data. http_headers: A dict of the current http headers. operation: The SoapOperation instance. binding_options: An options dict for the SOAP binding. Returns: A tuple of the envelope and headers.
codesearchnet
def send(email, subject=None, from_email=None, to_email=None, cc=None, bcc=None, reply_to=None, smtp=None): if is_string(email): email = EmailContent(email) from_email = sanitize_email_address((from_email or email.headers.get('from'))) to_email = sanitize_email_address((to_email or email.headers.get...
Send markdown email Args: email (str/obj): A markdown string or EmailContent object subject (str): subject line from_email (str): sender email address to_email (str/list): recipient email addresses cc (str/list): CC email addresses (string or a list) bcc (str/list): BCC email addresses (string or a list) reply_to (str...
codesearchnet
def read(self, vals): i = 0 {%- for field in fields %} {%- if field.is_list %} count = int(vals[i]) i += 1 for _ in range(count): obj = {{field.object_name}}() obj.read(vals[i:i + obj.field_count]) self.add_{{field.field_name}}...
Read values Args: vals (list): list of strings representing values
juraj-google-style
def save(self, filename=None, directory=None): if (filename is not None): self.filename = filename if (directory is not None): self.directory = directory filepath = self.filepath tools.mkdirs(filepath) data = text_type(self.source) with io.open(filepath, 'w', encoding=self.encodi...
Save the DOT source to file. Ensure the file ends with a newline. Args: filename: Filename for saving the source (defaults to ``name`` + ``'.gv'``) directory: (Sub)directory for source saving and rendering. Returns: The (possibly relative) path of the saved source file.
codesearchnet
def Add(self, service, method, request, global_params=None): method_config = service.GetMethodConfig(method) upload_config = service.GetUploadConfig(method) http_request = service.PrepareHttpRequest(method_config, request, global_params=global_params, upload_config=upload_config) api_request = self.ApiC...
Add a request to the batch. Args: service: A class inheriting base_api.BaseApiService. method: A string indicated desired method from the service. See the example in the class docstring. request: An input message appropriate for the specified service.method. global_params: Optional additional parameters to pass into m...
codesearchnet
def _read_parquet_columns(path, columns, num_splits, kwargs):
    """Read selected columns from a Parquet file into partitioned frames.

    Executed as a Ray task (Ray functions are not detected by codecov,
    thus pragma: no cover).

    Args:
        path: The path of the Parquet file.
        columns: The list of column names to read.
        num_splits: The number of partitions to split the column into.
        kwargs: Extra keyword arguments forwarded to pyarrow.

    Returns:
        A list containing the split partitions followed by the frame's
        row count.
    """
    import pyarrow.parquet as pq
    frame = pq.read_pandas(path, columns=columns, **kwargs).to_pandas()
    splits = _split_result_for_readers(0, num_splits, frame)
    # The row count travels with the splits so callers can rebuild the index.
    return splits + [len(frame.index)]
Use a Ray task to read columns from Parquet into a Pandas DataFrame. Note: Ray functions are not detected by codecov (thus pragma: no cover) Args: path: The path of the Parquet file. columns: The list of column names to read. num_splits: The number of partitions to split the column into. Returns: A list containing t...
codesearchnet
def break_bond(self, ind1, ind2, tol=0.2): sites = self._sites clusters = [[sites[ind1]], [sites[ind2]]] sites = [site for (i, site) in enumerate(sites) if (i not in (ind1, ind2))] def belongs_to_cluster(site, cluster): for test_site in cluster: if CovalentBond.is_bonded(site, test_...
Returns two molecules based on breaking the bond between atoms at index ind1 and ind2. Args: ind1 (int): Index of first site. ind2 (int): Index of second site. tol (float): Relative tolerance to test. Basically, the code checks if the distance between the sites is less than (1 + tol) * typical bond distances. Defaults...
codesearchnet
def _on_report(self, sequence, topic, message): try: conn_key = self._find_connection(topic) conn_id = self.conns.get_connection_id(conn_key) except ArgumentError: self._logger.warn("Dropping report message that does not correspond with a known connection, t...
Process a report received from a device. Args: sequence (int): The sequence number of the packet received topic (string): The topic this message was received on message (dict): The message itself
juraj-google-style
def random_channel_shift(x, intensity_range, channel_axis=0):
    """Perform a random channel shift. DEPRECATED.

    Args:
        x: Input tensor. Must be 3D.
        intensity_range: Transformation intensity; the actual shift is
            drawn uniformly from [-intensity_range, intensity_range].
        channel_axis: Index of axis for channels in the input tensor.

    Returns:
        Numpy image tensor.
    """
    shift = np.random.uniform(-intensity_range, intensity_range)
    return apply_channel_shift(x, shift, channel_axis=channel_axis)
Performs a random channel shift. DEPRECATED. Args: x: Input tensor. Must be 3D. intensity_range: Transformation intensity. channel_axis: Index of axis for channels in the input tensor. Returns: Numpy image tensor.
github-repos
def _apply_conv(self, inputs, w): if self._data_format == DATA_FORMAT_NWC: h_dim = 1 two_dim_conv_data_format = DATA_FORMAT_NHWC else: h_dim = 2 two_dim_conv_data_format = DATA_FORMAT_NCHW inputs = tf.expand_dims(inputs, axis=h_dim) two_dim_conv_stride = self.stride[:h_dim]...
Apply a `separable_conv2d` operation on `inputs` using `w`. Args: inputs: A Tensor of shape `data_format` and of type `tf.float16`, `tf.bfloat16` or `tf.float32`. w: A tuple of weight matrices of the same type as `inputs`, the first being the depthwise weight matrix, and the second being the pointwise weight matrix. ...
juraj-google-style
def load_dot_env_file(dot_env_path): if not os.path.isfile(dot_env_path): return {} logger.log_info("Loading environment variables from {}".format(dot_env_path)) env_variables_mapping = {} with io.open(dot_env_path, 'r', encoding='utf-8') as fp: for line in fp: ...
load .env file. Args: dot_env_path (str): .env file path Returns: dict: environment variables mapping { "UserName": "debugtalk", "Password": "123456", "PROJECT_KEY": "ABCDEFGH" } Raises: exceptions.FileFormatError: If .env file format is invalid.
juraj-google-style
def price(self, market: pmd.ProcessedMarketData, name: Optional[str]=None) -> types.FloatTensor: name = name or self._name + '_price' with tf.name_scope(name): discount_curve = cashflow_streams.get_discount_curve(self._discount_curve_type, market, self._discount_curve_mask) currencies = [cur.cur...
Returns the present value of the American options. Args: market: An instance of `ProcessedMarketData`. name: Python str. The name to give to the ops created by this function. Default value: `None` which maps to 'price'. Returns: A `Tensor` of shape `batch_shape` containing the modeled price of each American option c...
github-repos
def write(self, face, data, viewport=None, *, alignment=1) -> None:
    """Update the content of the texture.

    Args:
        face (int): The face to update.
        data (bytes): The pixel data, or a Buffer whose contents to use.
        viewport (tuple): The viewport.

    Keyword Args:
        alignment (int): The byte alignment of the pixels.
    """
    # Unwrap Buffer objects to their low-level handle.  The exact type
    # check (not isinstance) is intentional and preserved here.
    payload = data.mglo if type(data) is Buffer else data
    self.mglo.write(face, payload, viewport, alignment)
Update the content of the texture. Args: face (int): The face to update. data (bytes): The pixel data. viewport (tuple): The viewport. Keyword Args: alignment (int): The byte alignment of the pixels.
codesearchnet
def _parse_dbpath(dbpath): if isinstance(dbpath, list): dbpath = '|'.join(dbpath) if (not dbpath.endswith('$')): dbpath = ('(%s)$' % dbpath) return dbpath
Converts the dbpath to a regexp pattern. Transforms dbpath from a string or an array of strings to a regexp pattern which will be used to match database names. Args: dbpath: a string or an array containing the databases to be matched from a cluster. Returns: A regexp pattern that will match any of the desired databa...
codesearchnet
def window_unpartition(windows, window_size, pad_height_width, height_width): padded_height, padded_width = pad_height_width height, width = height_width batch_size = windows.shape[0] hidden_state = windows.view(batch_size, padded_height hidden_state = hidden_state.permute(0, 1, 3, 2, 4, 5).contig...
Window unpartition into original sequences and removing padding. Args: windows (`torch.Tensor`): Input tokens with [batch_size * num_windows, window_size, window_size, num_channels]. window_size (`int`): Window size. pad_height_width (`Tuple[int]`): Padded height and width (padded_height, padded_width). height_width (...
github-repos
def from_text_vision_configs(cls, text_config: Dict, vision_config: Dict, **kwargs):
    """Instantiate an Owlv2Config (or a derived class) from owlv2 text and
    vision model configurations.

    Args:
        text_config: Dictionary of text model configuration values.
        vision_config: Dictionary of vision model configuration values.
        **kwargs: Extra arguments forwarded to ``from_dict``.

    Returns:
        An instance of the configuration class.
    """
    merged = {'text_config': text_config, 'vision_config': vision_config}
    return cls.from_dict(merged, **kwargs)
Instantiate a [`Owlv2Config`] (or a derived class) from owlv2 text model configuration and owlv2 vision model configuration. Returns: [`Owlv2Config`]: An instance of a configuration object
github-repos
def retry(self, retry_message=''):
    """Force a currently running asynchronous pipeline to retry.

    Note this may not be called by synchronous or generator pipelines;
    those must instead raise the 'Retry' exception during execution.

    Args:
        retry_message: Optional message explaining why the retry happened.

    Returns:
        True if the Pipeline will be retried, False otherwise.

    Raises:
        UnexpectedPipelineError: If called on a non-asynchronous pipeline.
    """
    # BUG FIX: 'async' became a reserved keyword in Python 3.7, so the
    # attribute (set elsewhere in the project) must be read via getattr;
    # 'self.async' is a SyntaxError on modern Python.  Behavior on
    # Python 2 is unchanged.
    if not getattr(self, 'async'):
        raise UnexpectedPipelineError(
            'May only call retry() method for asynchronous pipelines.')
    if self.try_cancel():
        self._context.transition_retry(self._pipeline_key, retry_message)
        return True
    else:
        return False
Forces a currently running asynchronous pipeline to retry. Note this may not be called by synchronous or generator pipelines. Those must instead raise the 'Retry' exception during execution. Args: retry_message: Optional message explaining why the retry happened. Returns: True if the Pipeline should be retried, Fals...
juraj-google-style
def gumbel_softmax(x, z_size, mode, softmax_k=0, temperature_warmup_steps=150000, summary=True, name=None): with tf.variable_scope(name, default_name='gumbel_softmax'): m = tf.layers.dense(x, (2 ** z_size), name='mask') if (softmax_k > 0): (m, kl) = top_k_softmax(m, softmax_k) ...
Gumbel softmax discretization bottleneck. Args: x: Input to the discretization bottleneck. z_size: Number of bits, where discrete codes range from 1 to 2**z_size. mode: tf.estimator.ModeKeys. softmax_k: If > 0 then do top-k softmax. temperature_warmup_steps: Number of steps it takes to decay temperature to 0. summary:...
codesearchnet
def temporal_latent_to_dist(name, x, hparams, output_channels=None): _, _, width, _, res_channels = common_layers.shape_list(x) if output_channels is None: output_channels = res_channels dilation_rates = get_dilation_rates(hparams, width) with tf.variable_scope(name, reuse=tf.AUTO_REUSE): h = x f...
Network that maps a time-indexed list of 3-D latents to a gaussian. Args: name: variable scope. x: List of 4-D Tensors indexed by time, (NHWC) hparams: tf.contrib.training.Hparams. output_channels: int, Number of channels of the output gaussian mean. Returns: dist: tfp.distributions.Normal
juraj-google-style
def _save(filename, tensor_names, tensors, tensor_slices=None, name='save'):
    """Save a list of tensors to a file with given names.

    Example usage without slice info:
        Save("/foo/bar", ["w", "b"], [w, b])

    Example usage with slices:
        Save("/foo/bar", ["w", "w"], [slice0, slice1],
             tensor_slices=["4 10 0,2:-", "4 10 2,2:-"])

    Args:
        filename: The file name of the sstable.
        tensor_names: A list of strings naming the tensors.
        tensors: The tensors to save.
        tensor_slices: Optional slice specs, one per tensor.
        name: Name for the op.

    Returns:
        The save op.
    """
    if tensor_slices is None:
        # Whole-tensor save.
        return gen_io_ops.save(filename, tensor_names, tensors, name=name)
    # Sliced save: note gen_io_ops.save_slices takes the slice specs
    # before the tensors.
    return gen_io_ops.save_slices(
        filename, tensor_names, tensor_slices, tensors, name=name)
Save a list of tensors to a file with given names. Example usage without slice info: Save("/foo/bar", ["w", "b"], [w, b]) Example usage with slices: Save("/foo/bar", ["w", "w"], [slice0, slice1], tensor_slices=["4 10 0,2:-", "4 10 2,2:-"]) Args: filename: the file name of the sstable. tensor_names: a list of strings...
github-repos
def HashFilePath(self, path, byte_count):
    """Update the underlying hashers with the file at the given path.

    Args:
        path: A path to the file that is going to be fed to the hashers.
        byte_count: A maximum number of bytes that are going to be
            processed.
    """
    # Open in binary mode; HashFile consumes the file object.
    with open(path, "rb") as file_obj:
        self.HashFile(file_obj, byte_count)
Updates underlying hashers with file on a given path. Args: path: A path to the file that is going to be fed to the hashers. byte_count: A maximum numbers of bytes that are going to be processed.
juraj-google-style
def one_of(self, chset: str) -> str:
    """Parse one character from the specified set.

    Args:
        chset: String of characters to try as alternatives.

    Returns:
        The character that was actually matched.

    Raises:
        UnexpectedInput: If the next character is not in `chset`.
    """
    candidate = self.peek()
    if candidate not in chset:
        raise UnexpectedInput(self, "one of " + chset)
    # Consume the matched character.
    self.offset += 1
    return candidate
Parse one character form the specified set. Args: chset: string of characters to try as alternatives. Returns: The character that was actually matched. Raises: UnexpectedInput: If the next character is not in `chset`.
juraj-google-style
def pnlSingle( self, account: str = '', modelCode: str = '', conId: int = 0) -> List[PnLSingle]: return [v for v in self.wrapper.pnlSingles.values() if (not account or v.account == account) and (not modelCode or v.modelCode == modelCode) and ...
List of subscribed :class:`.PnLSingle` objects (profit and loss for single positions). The :class:`.PnLSingle` objects are kept live updated. Args: account: If specified, filter for this account name. modelCode: If specified, filter for this account model. conId: If specified, filter for this contract ID.
juraj-google-style
def _create_key_func(extractor, none_is_largest): if none_is_largest: def key_func_none_is_largest(session_group): value = extractor(session_group) return (value is None, value) return key_func_none_is_largest def key_func_none_is_smallest(session_group): value = extractor(session_group) ...
Returns a key_func to be used in list.sort(). Returns a key_func to be used in list.sort() that sorts session groups by the value extracted by extractor. 'None' extracted values will either be considered largest or smallest as specified by the "none_is_largest" boolean parameter. Args: extractor: An extractor functio...
juraj-google-style
def time(func, *args, **kwargs):
    """Call *func* with the supplied arguments and return the total
    execution time as a float in seconds.

    The precision of the returned value depends on the precision of
    `time.time()` on your platform.  The function's own return value is
    discarded.

    Arguments:
        func: The function to run.
        *args: Positional arguments to pass into the function.
        **kwargs: Keyword arguments to pass into the function.

    Returns:
        float: Elapsed wall-clock time in seconds.
    """
    started = time_module.time()
    func(*args, **kwargs)
    # Measure immediately after the call returns.
    return time_module.time() - started
Call the supplied function with the supplied arguments, and return the total execution time as a float in seconds. The precision of the returned value depends on the precision of `time.time()` on your platform. Arguments: func: the function to run. *args: positional arguments to pass into the function. **kwargs: keyw...
codesearchnet
def words(self, index = None): if index is None: return self.select(Word,None,True,default_ignore_structure) else: if index < 0: index = self.count(Word,None,True,default_ignore_structure) + index for i, e in enumerate(self.select(Word,None,Tr...
Returns a generator of Word elements found (recursively) under this element. Arguments: * ``index``: If set to an integer, will retrieve and return the n'th element (starting at 0) instead of returning the list of all
juraj-google-style
def print_tools(self, pattern=None, buf=sys.stdout): seen = set() rows = [] context = self.context if context: data = context.get_tools() conflicts = set(context.get_conflicting_tools().keys()) for (_, (variant, tools)) in sorted(data.items()): pkg_str = variant.quali...
Print a list of visible tools. Args: pattern (str): Only list tools that match this glob pattern.
codesearchnet
def _getAuthenticated(self, browser, url): try: if (len(self.creds) > 0): c = random.choice(self.creds)[0] browser.setNewPassword(url, c.user, c.password) return True else: raise NoCredentialsException(str(self)) except AttributeError as e: ...
Getting authenticated. This method may be overwritten. TODO: update to version 2 of the wrappers. Args: ----- browser: The browser in which the user will be authenticated. url: The URL to get authenticated in. Return: ------- True or False. Raises: ------ NoCredentialsException: If no valid credentials have been f...
codesearchnet
def get_tree_starting_at(module: str, edges: List[Tuple[str, str]]) -> List[Union[str, List[str]]]: vertices_seen = [module] new_edges = [edge for edge in edges if edge[0] == module and edge[1] != module and ('__init__.py' not in edge[1])] tree = [module] while len(new_edges) > 0: tree.append(ne...
Returns the tree starting at a given module following all edges. Args: module (`str`): The module that will be the root of the subtree we want. eges (`List[Tuple[str, str]]`): The list of all edges of the tree. Returns: `List[Union[str, List[str]]]`: The tree to print in the following format: [module, [list of edges ...
github-repos
def get_destination(self, filepath, targetdir=None):
    """Return the destination path for a given source file path.

    The destination is always a file with the ``.css`` extension.

    Args:
        filepath (str): A file path, always relative to the sources
            directory.  If not relative, ``targetdir`` won't be joined.
        targetdir (str): Optional directory joined in front of the result.

    Returns:
        str: Destination file path.
    """
    destination = self.change_extension(filepath, 'css')
    # Only prefix with the target directory when one is given.
    return os.path.join(targetdir, destination) if targetdir else destination
Return destination path from given source file path. Destination is allways a file with extension ``.css``. Args: filepath (str): A file path. The path is allways relative to sources directory. If not relative, ``targetdir`` won't be joined. absolute (bool): If given will be added at beginning of file path. Returns:...
codesearchnet
def preprocess_bel_stmt(stmt: str) -> str:
    """Clean up basic formatting of a BEL statement.

    Args:
        stmt: BEL statement as a single string.

    Returns:
        Cleaned BEL statement.
    """
    cleaned = stmt.strip()
    # Collapse runs of commas, then normalize to "comma + single space".
    cleaned = re.sub(r",+", ",", cleaned)
    cleaned = re.sub(r",", ", ", cleaned)
    # Squeeze repeated spaces down to one.
    return re.sub(r" +", " ", cleaned)
Clean up basic formatting of BEL statement Args: stmt: BEL statement as single string Returns: cleaned BEL statement
juraj-google-style
def get_sub_category(alt_len, ref_len, category, svtype=None): subcategory = '' if category in ('snv', 'indel', 'cancer'): if ref_len == alt_len: subcategory = 'snv' else: subcategory = 'indel' elif category == 'sv': subcategory = svtype return subc...
Get the subcategory for a VCF variant The sub categories are: 'snv', 'indel', 'del', 'ins', 'dup', 'bnd', 'inv' Args: alt_len(int) ref_len(int) category(str) svtype(str) Returns: subcategory(str)
juraj-google-style
def from_flag(cls, flagname, flag_values, other_flag_values=None): first_module = flag_values.find_module_defining_flag(flagname, default='<unknown>') if (other_flag_values is None): second_module = _helpers.get_calling_module() else: second_module = other_flag_values.find_module_defining_fl...
Creates a DuplicateFlagError by providing flag name and values. Args: flagname: str, the name of the flag being redefined. flag_values: FlagValues, the FlagValues instance containing the first definition of flagname. other_flag_values: FlagValues, if it is not None, it should be the FlagValues object where the second ...
codesearchnet
def log_uuid(self, uuid):
    """Log the object with the specified `uuid` to `self.uuids` if possible.

    Args:
        uuid (str): String value of a :meth:`uuid.uuid4` value for the
            object.
    """
    # Preserve short-circuit order: only consult the global registry when
    # the uuid hasn't been recorded locally yet.
    if uuid not in self.uuids:
        if uuid in uuids:
            self.uuids[uuid] = uuids[uuid].describe()
Logs the object with the specified `uuid` to `self.uuids` if possible. Args: uuid (str): string value of :meth:`uuid.uuid4` value for the object.
codesearchnet
def list_partitions(self, table, retry=DEFAULT_RETRY): table = _table_arg_to_table_ref(table, default_project=self.project) meta_table = self.get_table(TableReference(self.dataset(table.dataset_id, project=table.project), ('%s$__PARTITIONS_SUMMARY__' % table.table_id))) subset = [col for col in meta_table.s...
List the partitions in a table. Arguments: table (Union[ \ :class:`~google.cloud.bigquery.table.Table`, \ :class:`~google.cloud.bigquery.table.TableReference`, \ str, \ ]): The table or reference from which to get partition info retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. Returns: List[str]:...
codesearchnet
def __getitem__(self, pkg_id): if pkg_id in self.__reg_software: return self.__reg_software[pkg_id] else: raise KeyError(pkg_id)
Returns information on a package. Args: pkg_id (str): Package Id of the software/component Returns: dict or list: List if ``version_only`` is ``True`` otherwise dict
juraj-google-style
def prepare_for_model(self, ids: List[int], pair_ids: Optional[List[int]]=None, add_special_tokens: bool=True, padding: Union[bool, str, PaddingStrategy]=False, truncation: Union[bool, str, TruncationStrategy, None]=None, max_length: Optional[int]=None, stride: int=0, pad_to_multiple_of: Optional[int]=None, padding_sid...
Prepares a sequence of input id, or a pair of sequences of inputs ids so that it can be used by the model. It adds special tokens, truncates sequences if overflowing while taking into account the special tokens and manages a moving window (with user defined stride) for overflowing tokens. Please Note, for *pair_ids* di...
github-repos
def to_json_string(self) -> str: dictionary = self.to_dict() for key, value in dictionary.items(): if isinstance(value, np.ndarray): dictionary[key] = value.tolist() _processor_class = dictionary.pop('_processor_class', None) if _processor_class is not None: dictionary['proce...
Serializes this instance to a JSON string. Returns: `str`: String containing all the attributes that make up this feature_extractor instance in JSON format.
github-repos
def binary_cross_entropy_loss_with_logits(x, target, name=None): with tf.name_scope(name, 'binary_cross_entropy_with_logits', [x, target]) as scope: x.get_shape().assert_is_compatible_with(target.get_shape()) neg_softplus = -tf.nn.softplus(-x) return -tf.add(tf.multiply(target, neg...
Calculates the binary cross entropy between sigmoid(x) and target. Expects unscaled logits. Do not pass in results of sigmoid operation. Args: x: the calculated pre-sigmoid values target: the desired values. name: the name for this op, defaults to binary_cross_entropy_with_logits Returns: -(target * -softplus(-x) + (...
juraj-google-style
def forward(self, layer_input): bsz, length, emb_size = layer_input.size() layer_input = layer_input.reshape(-1, emb_size) _, batch_index, batch_gates, expert_size, router_logits = self.router(layer_input) expert_inputs = layer_input[batch_index] hidden_states = self.input_linear(expert_inputs, expe...
Forward pass of the mixture of experts layer. Args: layer_input (Tensor): Input tensor. Returns: Tensor: Output tensor. Tensor: Router logits.
github-repos
def get_model_schema_and_features(model_dir): schema_file = os.path.join(model_dir, 'assets.extra', 'schema.json') schema = json.loads(file_io.read_file_to_string(schema_file)) features_file = os.path.join(model_dir, 'assets.extra', 'features.json') features_config = json.loads(file_io.read_file_to_string(fe...
Get a local model's schema and features config. Args: model_dir: local or GCS path of a model. Returns: A tuple of schema (list) and features config (dict).
juraj-google-style
def claim(self, file_readers): (prefix_to_reader, unclaimed_readers) = self._find_strelka_files(file_readers) prefix_by_patients = self._split_prefix_by_patient(prefix_to_reader) self._validate_vcf_readers(prefix_by_patients) vcf_readers = self._create_vcf_readers(prefix_to_reader) return (unclaimed...
Recognizes and claims Strelka VCFs form the set of all input VCFs. Each defined caller has a chance to evaluate and claim all the incoming files as something that it can process. Args: file_readers: the collection of currently unclaimed files Returns: A tuple of unclaimed readers and StrelkaVcfReaders.
codesearchnet
def __init__(self, listener, dispatcher): logger.info('Creating %s', ClearlyServer.__name__) self.listener = listener self.dispatcher = dispatcher
Constructs a server instance. Args: listener (EventListener): the object that listens and keeps celery events dispatcher (StreamingDispatcher): the mechanism to dispatch data to clients
juraj-google-style
def automatic_linemode(divisions, ibz): kpoints = list() labels = list() for path in ibz.kpath["path"]: kpoints.append(ibz.kpath["kpoints"][path[0]]) labels.append(path[0]) for i in range(1, len(path) - 1): kpoints.append(ibz.kpath["kp...
Convenient static constructor for a KPOINTS in mode line_mode. gamma centered Monkhorst-Pack grids and the number of subdivisions along each reciprocal lattice vector determined by the scheme in the VASP manual. Args: divisions: Parameter determining the number of k-points along each hight symetry lines. ibz: HighSymm...
juraj-google-style
def _CopyFromDateTimeValues(self, date_time_values): year = date_time_values.get('year', 0) month = date_time_values.get('month', 0) day_of_month = date_time_values.get('day_of_month', 0) hours = date_time_values.get('hours', 0) minutes = date_time_values.get('minutes', 0) seconds = date_ti...
Copies time elements from date and time values. Args: date_time_values (dict[str, int]): date and time values, such as year, month, day of month, hours, minutes, seconds, microseconds.
juraj-google-style
def fit(self, x, augment=False, rounds=1, seed=None): x = np.asarray(x, dtype=self.dtype) if x.ndim != 4: raise ValueError('Input to `.fit()` should have rank 4. Got array with shape: ' + str(x.shape)) if x.shape[self.channel_axis] not in {1, 3, 4}: warnings.warn('Expected input to be images...
Fits the data generator to some sample data. This computes the internal data stats related to the data-dependent transformations, based on an array of sample data. Only required if `featurewise_center` or `featurewise_std_normalization` or `zca_whitening` are set to `True`. When `rescale` is set to a value, rescalin...
github-repos
def add_one(self, url: str, url_properties: Optional[URLProperties]=None, url_data: Optional[URLData]=None): self.add_many([AddURLInfo(url, url_properties, url_data)])
Add a single URL to the table. Args: url: The URL to be added url_properties: Additional values to be saved url_data: Additional data to be saved
juraj-google-style
def norm(self, coords: Vector3Like, frac_coords: bool = True) -> float: return np.sqrt(self.dot(coords, coords, frac_coords=frac_coords))
Compute the norm of vector(s). Args: coords: Array-like object with the coordinates. frac_coords: Boolean stating whether the vector corresponds to fractional or cartesian coordinates. Returns: one-dimensional `numpy` array.
juraj-google-style
def _safe_close(self, sess: session.Session): try: sess.close() except Exception: pass
Closes a session without raising an exception. Just like sess.close() but ignores exceptions. Args: sess: A `Session`.
github-repos
def show_inputs(self, varnames=None, nids=None, wslice=None, stream=sys.stdout): if varnames is not None: varnames = [s.strip() for s in list_strings(varnames)] dlist = collections.defaultdict(list) for task in self.select_tasks(nids=nids, wslice=wslice)...
Print the input of the tasks to the given stream. Args: varnames: List of Abinit variables. If not None, only the variable in varnames are selected and printed. nids: List of node identifiers. By defaults all nodes are shown wslice: Slice object used to select works. stream: File-like object, Default: sys.stdout
juraj-google-style
def eval(self, session=None): raise NotImplementedError
In a session, computes and returns the value of this variable. This is not a graph construction method, it does not add ops to the graph. This convenience method requires a session where the graph containing this variable has been launched. If no session is passed, the default session is used. See `tf.compat.v1.Sess...
github-repos
def _ParseMRUListKey(self, parser_mediator, registry_key, codepage='cp1252'): try: mrulist = self._ParseMRUListValue(registry_key) except (ValueError, errors.ParseError) as exception: parser_mediator.ProduceExtractionWarning( 'unable to parse MRUList value with error: {0!s}'.format(ex...
Extract event objects from a MRUList Registry key. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. registry_key (dfwinreg.WinRegistryKey): Windows Registry key. codepage (Optional[str]): extended ASCII string codepage.
juraj-google-style
def get_sanger_unevaluated(store, institute_id, user_id): sanger_ordered_by_case = store.sanger_ordered(institute_id, user_id) unevaluated = [] for item in sanger_ordered_by_case: case_id = item['_id'] case_obj = store.case(case_id=case_id) if not case...
Get all variants for an institute having Sanger validations ordered but still not evaluated Args: store(scout.adapter.MongoAdapter) institute_id(str) Returns: unevaluated: a list that looks like this: [ {'case1': [varID_1, varID_2, .., varID_n]}, {'case2' : [varID_1, varID_2, .., varID_n]} ], where the keys are case_...
juraj-google-style
def __init__(self, min_bundle_size=0, desired_bundle_size=DEFAULT_DESIRED_BUNDLE_SIZE, columns=None, with_filename=False, label='ReadAllFiles'): super().__init__() source_from_file = partial(_ParquetSource, min_bundle_size=min_bundle_size, columns=columns) self._read_all_files = filebasedsource.ReadAllFiles...
Initializes ``ReadAllFromParquet``. Args: min_bundle_size: the minimum size in bytes, to be considered when splitting the input into bundles. desired_bundle_size: the desired size in bytes, to be considered when splitting the input into bundles. columns: list of columns that will be read from files. A column name may ...
github-repos
def _infer_state_dtype(explicit_dtype, state): if explicit_dtype is not None: return explicit_dtype elif nest.is_nested(state): inferred_dtypes = [element.dtype for element in nest.flatten(state)] if not inferred_dtypes: raise ValueError(f'Unable to infer dtype from argument ...
Infer the dtype of an RNN state. Args: explicit_dtype: explicitly declared dtype or None. state: RNN's hidden state. Must be a Tensor or a nested iterable containing Tensors. Returns: dtype: inferred dtype of hidden state. Raises: ValueError: if `state` has heterogeneous dtypes or is empty.
github-repos
def root(self) -> bytes: retry_interval = ADB_ROOT_RETRY_ATTEMPT_INTERVAL_SEC for attempt in range(ADB_ROOT_RETRY_ATTEMPTS): try: return self._exec_adb_cmd('root', args=None, shell=False, timeout=None, stderr=None) except AdbError as e: if attempt + 1 < ADB_ROOT_RETRY_ATT...
Enables ADB root mode on the device. This method will retry to execute the command `adb root` when an AdbError occurs, since sometimes the error `adb: unable to connect for root: closed` is raised when executing `adb root` immediately after the device is booted to OS. Returns: A string that is the stdout of root comm...
github-repos
def links(res: requests.models.Response, search: str=None, pattern: str=None) -> list: hrefs = [link.to_text() for link in find_all_links(res.text)] if search: hrefs = [href for href in hrefs if (search in href)] if pattern: hrefs = [href for href in hrefs if re.findall(pattern, href)] r...
Get the links of the page. Args: res (requests.models.Response): The response of the page. search (str, optional): Defaults to None. Search the links you want. pattern (str, optional): Defaults to None. Search the links use a regex pattern. Returns: list: All the links of the page.
codesearchnet
def draw(self, filename, color=True): verify_dependencies(['pgv']) if (not hasattr(self, '_results')): raise RuntimeError('Graph cannot be drawn before it is executed. Try calling run() first.') g = pgv.AGraph(directed=True) g.node_attr['colorscheme'] = 'set312' for elem in self._results: ...
Render a plot of the graph via pygraphviz. Args: filename (str): Path to save the generated image to. color (bool): If True, will color graph nodes based on their type, otherwise will draw a black-and-white graph.
codesearchnet
def _get_or_load_domain(self, domain): if isinstance(domain, six.string_types): if (domain in self.domains): return self.domains[domain] elif exists(domain): with open(domain, 'r') as fobj: domain = json.load(fobj) else: raise ValueError("N...
Return a domain if one already exists, or create a new one if not. Args: domain (str, dict): Can be one of: - The name of the Domain to return (fails if none exists) - A path to the Domain configuration file - A dictionary containing configuration information
codesearchnet
def inter_data_operation(self, axis, func, other): if axis: partitions = self.row_partitions other_partitions = other.row_partitions else: partitions = self.column_partitions other_partitions = other.column_partitions func = self.preproces...
Apply a function that requires two BaseFrameManager objects. Args: axis: The axis to apply the function over (0 - rows, 1 - columns) func: The function to apply other: The other BaseFrameManager object to apply func to. Returns: A new BaseFrameManager object, the type of object that called this.
juraj-google-style
def chain_break_frequency(samples, embedding): counts = {v: 0 for v in embedding} total = 0 for sample in samples: for (v, chain) in iteritems(embedding): vals = [sample[u] for u in chain] if (not _all_equal(vals)): counts[v] += 1 total += 1 return...
Determines the frequency of chain breaks in the given samples. Args: samples (iterable): An iterable of samples where each sample is a dict of the form {v: val, ...} where v is a variable in the target graph and val is the associated value as determined by a binary quadratic model sampler. embedding (dict): The mappin...
codesearchnet
def removeRouter(self, xRouterId): print '%s call removeRouter' % self.port print xRouterId routerId = '' routerId = self.__convertRlocToRouterId(xRouterId) print routerId if routerId == None: print 'no matched xRouterId' return False ...
kick router with a given router id from the Thread Network Args: xRouterId: a given router id in hex format Returns: True: successful to remove the router from the Thread Network False: fail to remove the router from the Thread Network
juraj-google-style
def _CreateAnalysisPlugins(self, options): if (not self._analysis_plugins): return {} analysis_plugins = analysis_manager.AnalysisPluginManager.GetPluginObjects(self._analysis_plugins) for analysis_plugin in analysis_plugins.values(): helpers_manager.ArgumentHelperManager.ParseOptions(option...
Creates the analysis plugins. Args: options (argparse.Namespace): command line arguments. Returns: dict[str, AnalysisPlugin]: analysis plugins and their names.
codesearchnet
def documentation(self, level='first'): docs = (t.docstring for t in (list(self.conjunction.terms) + [self]) if (t.docstring is not None)) if (level.lower() == 'first'): doc = next(docs, None) elif (level.lower() == 'top'): doc = list(docs) return doc
Return the documentation of the type. By default, this is the first docstring on a top-level term. By setting *level* to `"top"`, the list of all docstrings on top-level terms is returned, including the type's `docstring` value, if not `None`, as the last item. The docstring for the type itself is available via :attr:...
codesearchnet
def create(self, domain, type_name, search_command, body): return self._request(domain, type_name, search_command, 'POST', body)
Create entry in ThreatConnect Data Store Args: domain (string): One of 'local', 'organization', or 'system'. type_name (string): This is a free form index type name. The ThreatConnect API will use this resource verbatim. search_command (string): Search command to pass to ES. body (str): JSON serialized data.
codesearchnet
def _convert_ddb_list_to_list(conversion_list): ret_list = [] for v in conversion_list: for v1 in v: ret_list.append(v[v1]) return ret_list
Given a dynamodb list, it will return a python list without the dynamodb datatypes Args: conversion_list (dict): a dynamodb list which includes the datatypes Returns: list: Returns a sanitized list without the dynamodb datatypes
juraj-google-style
def transform_absolute_coords(self, width, height): if (self.type != EventType.POINTER_MOTION_ABSOLUTE): raise AttributeError(_wrong_meth.format(self.type)) abs_x = self._libinput.libinput_event_pointer_get_absolute_x_transformed(self._handle, width) abs_y = self._libinput.libinput_event_pointer_get...
Return the current absolute coordinates of the pointer event, transformed to screen coordinates. For pointer events that are not of type :attr:`~libinput.constant.EventType.POINTER_MOTION_ABSOLUTE`, this method raises :exc:`AttributeError`. Args: width (int): The current output screen width. height (int): The current...
codesearchnet
def getJsonFromApi(view, request): jsonText = view(request) jsonText = json.loads(jsonText.content.decode('utf-8')) return jsonText
Return json from querying Web Api Args: view: django view function. request: http request object got from django. Returns: json format dictionary
codesearchnet
def force_checkpoint_conversion(value=True): global _FORCE_CHECKPOINT_CONVERSION _FORCE_CHECKPOINT_CONVERSION = value
Forces checkpoint to use the new implementation. The new checkpoint implementation is changing the saved metadata slightly, and therefore may break forward compatibility in newly saved checkpoints. This means: - Previous versions of TensorFlow may not be able to load new checkpoints. - Backwards compatibility is unch...
github-repos
def nack(self, items): self.modify_ack_deadline( [requests.ModAckRequest(ack_id=item.ack_id, seconds=0) for item in items] ) self.drop([requests.DropRequest(*item) for item in items])
Explicitly deny receipt of messages. Args: items(Sequence[NackRequest]): The items to deny.
juraj-google-style
def get_help_usage(command): if not command: doc = get_primary_command_usage() elif command in ('-a', '--all'): subcommands = [k for k in settings.subcommands if k is not None] available_commands = subcommands + ['help'] command_doc = '\nAvailable commands:\n{}\n'.forma...
Print out a help message and exit the program. Args: command: If a command value is supplied then print the help message for the command module if available. If the command is '-a' or '--all', then print the standard help message but with a full list of available commands. Raises: ValueError: Raised if the help messa...
juraj-google-style
def _choose_random_edge(self, edges: Set[EDGE]) -> Optional[EDGE]: if edges: index = self._rand.randint(len(edges)) for e in edges: if not index: return e index -= 1 return None
Picks random edge from the set of edges. Args: edges: Set of edges to pick from. Returns: Random edge from the supplied set, or None for empty set.
juraj-google-style