code
stringlengths
20
4.93k
docstring
stringlengths
33
1.27k
source
stringclasses
3 values
def _check_rules(browser, rules_js, config): if (config.rules_to_run is None): msg = 'No accessibility rules were specified to check.' log.warning(msg) return None rules = config.rules_to_run if rules: rules_config = u'auditConfig.auditRulesToRun = {rules};'.format(rules=rule...
Check the page for violations of the configured rules. By default, all rules in the ruleset will be checked. Args: browser: a browser instance. rules_js: the ruleset JavaScript as a string. config: an AxsAuditConfig instance. Returns: A namedtuple with 'errors' and 'warnings' fields whose values are the errors and wa...
codesearchnet
def name_changed(self, changed_item): name = str(changed_item.text()) if name != '': if name != self.selected_element_name: self.elements_from_file[name] = self.elements_from_file[self.selected_element_name] del self.elements_from_file[self....
checks if name has been changed and ignores the name change if the changed_item is an existing script Args: changed_item:
juraj-google-style
def discrete_bottleneck(self, x): x_reshaped = self.slice_hidden(x) x_means_hot = [] x_means = 0 loss = 0 x_means_hot, x_means, q_loss, e_loss = self.embedding_lookup( x_reshaped, self.means) if self.hparams.ema: tf.logging.info("Using EMA with beta = {}".format(self.hparams....
Discretization bottleneck for latent variables. Args: x: Input to the discretization bottleneck. Returns: Embedding to pass to the decoder, discrete latent, loss, and the embedding function. Raises: ValueError: If projection_tensors is None for reshape_method project, or ema_count or ema_means is None if we are usin...
juraj-google-style
def on_run_end(self, request): self._is_run_start = False if request.performed_action == framework.OnRunStartAction.DEBUG_RUN: partition_graphs = None if request.run_metadata and request.run_metadata.partition_graphs: partition_graphs = request.run_metadata.partition_graphs e...
Overrides on-run-end callback. Actions taken: 1) Load the debug dump. 2) Bring up the Analyzer CLI. Args: request: An instance of OnSessionInitRequest. Returns: An instance of OnSessionInitResponse.
github-repos
def extractDates(self, inp): def merge(param): (day, time) = param if (not (day or time)): return None if (not day): return time if (not time): return day return datetime.datetime(day.year, day.month, day.day, time.hour, time.minute) d...
Extract semantic date information from an input string. In effect, runs both parseDay and parseTime on the input string and merges the results to produce a comprehensive datetime object. Args: inp (str): Input string to be parsed. Returns: A list of datetime objects containing the extracted dates from the input snipp...
codesearchnet
def wp_decode(self, sequences):
    """Convert a list of lists of word piece token ids into a list of strings.

    Delegates to the word piece tokenizer's ``batch_decode`` and strips the
    spaces it inserts between word pieces.

    Args:
        sequences (`torch.Tensor`): List of tokenized input ids.

    Returns:
        `List[str]`: The list of wp decoded sentences.
    """
    decoded = self.wp_tokenizer.batch_decode(sequences)
    return [text.replace(' ', '') for text in decoded]
Convert a list of lists of word piece token ids into a list of strings by calling word piece tokenizer. Args: sequences (`torch.Tensor`): List of tokenized input ids. Returns: `List[str]`: The list of wp decoded sentences.
github-repos
def _get_function(self, name):
    """Returns the function definition for 'name'.

    Args:
        name: string function name.

    Returns:
        The function def proto, or None if no function with that name is
        registered.
    """
    key = compat.as_str(name)
    return self._functions.get(key, None)
Returns the function definition for 'name'. Args: name: string function name. Returns: The function def proto.
github-repos
def playback_trajectory(env, ep_dir): xml_path = os.path.join(ep_dir, 'model.xml') with open(xml_path, 'r') as f: env.reset_from_xml_string(f.read()) state_paths = os.path.join(ep_dir, 'state_*.npz') t = 0 for state_file in sorted(glob(state_paths)): print(state_file) dic = n...
Playback data from an episode. Args: ep_dir: The path to the directory containing data for an episode.
codesearchnet
def update_tag(self, tag_name, description=None, custom_properties=None, **kwargs): data = {'description': description or '', 'customProperties': custom_properties or {}} resp = self._put(self._u(self._TAG_ENDPOINT_SUFFIX, tag_name), d...
update a tag by name Args: tag_name (string): name of tag to update description (optional[string]): a description custom_properties (optional[dict]): dictionary of custom properties
juraj-google-style
def _convert_id_to_token(self, artists_index, genres_index, lyric_index): artist = self.artists_decoder.get(artists_index) genres = [self.genres_decoder.get(genre) for genre in genres_index] lyrics = [self.lyrics_decoder.get(character) for character in lyric_index] return (artist, genres, lyrics)
Converts an index (integer) in a token (str) using the vocab. Args: artists_index (`int`): Index of the artist in its corresponding dictionary. genres_index (`Union[List[int], int]`): Index of the genre in its corresponding dictionary. lyric_index (`List[int]`): List of character indices, which each correspond to a ch...
github-repos
def get_special_tokens_mask(self, token_ids_0: List[int], token_ids_1: Optional[List[int]]=None, already_has_special_tokens: bool=False) -> List[int]: if already_has_special_tokens: return super().get_special_tokens_mask(token_ids_0=token_ids_0, token_ids_1=token_ids_1, already_has_special_tokens=True) ...
Retrieve sequence ids from a token list that has no special tokens added. This method is called when adding special tokens using the tokenizer `prepare_for_model` method. Args: token_ids_0 (`List[int]`): List of IDs. token_ids_1 (`List[int]`, *optional*): Optional second list of IDs for sequence pairs. already_has_spe...
github-repos
def is_link(path): if (sys.getwindowsversion().major < 6): raise SaltInvocationError('Symlinks are only supported on Windows Vista or later.') try: return salt.utils.path.islink(path) except Exception as exc: raise CommandExecutionError(exc)
Check if the path is a symlink This is only supported on Windows Vista or later. Inline with Unix behavior, this function will raise an error if the path is not a symlink, however, the error raised will be a SaltInvocationError, not an OSError. Args: path (str): The path to a file or directory Returns: bool: True i...
codesearchnet
def to_dict(self, fields=None): data = {} def _add(field): return (fields is None or field in fields) if _add("resolved_packages"): resolved_packages = [] for pkg in (self._resolved_packages or []): resolved_packages.append(pkg.handl...
Convert context to dict containing only builtin types. Args: fields (list of str): If present, only write these fields into the dict. This can be used to avoid constructing expensive fields (such as 'graph') for some cases. Returns: dict: Dictified context.
juraj-google-style
def labels(self, main_type, sub_type, unique_id, owner=None, filters=None, params=None): params = params or {} if owner: params['owner'] = owner if filters and filters.filters: params['filters'] = filters.filters_string if not sub_type: url =...
Args: main_type: sub_type: unique_id: owner: filters: params: Return:
juraj-google-style
def graph_def(self):
    """A serializable version of the underlying TensorFlow graph.

    Returns:
        A graph_pb2.GraphDef proto containing nodes for all of the
        Operations in the underlying TensorFlow graph.
    """
    serialized = self._graph.as_graph_def(add_shapes=self._add_shapes)
    return serialized
A serializable version of the underlying TensorFlow graph. Returns: A graph_pb2.GraphDef proto containing nodes for all of the Operations in the underlying TensorFlow graph.
github-repos
def transformer_prepare_encoder(inputs, target_space, hparams, features=None): ishape_static = inputs.shape.as_list() encoder_input = inputs if features and "inputs_segmentation" in features: inputs_segmentation = features["inputs_segmentation"] inputs_position = features["inputs_position"] ta...
Prepare one shard of the model for the encoder. Args: inputs: a Tensor. target_space: a Tensor. hparams: run hyperparameters features: optionally pass the entire features dictionary as well. This is needed now for "packed" datasets. Returns: encoder_input: a Tensor, bottom of encoder stack encoder_self_attention_bias...
juraj-google-style
def _get_index_points(self, index_points=None): if self._index_points is None and index_points is None: raise ValueError( 'This GaussianProcess instance was not instantiated with a value for ' 'index_points. One must therefore be provided when calling sample, ' 'log_prob, an...
Return `index_points` if not None, else `self._index_points`. Args: index_points: if given, this is what is returned; else, `self._index_points` Returns: index_points: the given arg, if not None, else the class member `self._index_points`. Raises: ValueError: if `index_points` and `self._index_points` are both `None`...
juraj-google-style
def transformer_encoder_ffn_unit(x, hparams, nonpadding_mask=None, pad_remover=None): with tf.variable_scope('ffn'): if (hparams.transformer_ffn_type == 'fc'): y = transformer.transformer_ffn_layer(common_layers.layer_preprocess(x, hparams), hparams, pad_remover, conv_padding='SAME', nonpadding_...
Applies a feed-forward function which is parametrised for encoding. Args: x: input hparams: model hyper-parameters nonpadding_mask: optional Tensor with shape [batch_size, encoder_length] indicating what positions are not padding. This is used to mask out padding in convolutional layers. We generally only need this...
codesearchnet
def _parse_price(html_chunk):
    """Parse price of the book.

    Args:
        html_chunk (obj): HTMLElement containing slice of the page with
            details.

    Returns:
        str/None: Price as string with currency or None if not found.
    """
    price_el = html_chunk.find('div', {'class': 'prices'})
    raw_price = get_first_content(price_el)
    if not raw_price:
        return None
    # Strip markup, then keep only the last line of the remaining text.
    cleaned = dhtmlparser.removeTags(raw_price)
    return cleaned.split('\n')[-1]
Parse price of the book. Args: html_chunk (obj): HTMLElement containing slice of the page with details. Returns: str/None: Price as string with currency or None if not found.
codesearchnet
def __init__(self, config, auth: str) -> None:
    """Construct a report factory, providing project and authentication data.

    This class tracks the report ID internally (``reportId``) once a request
    call populates it.

    Args:
        config: see starthinker/util/configuration.py.
        auth: either "user" or "service" used to create and/or read the
            report.

    Returns:
        None
    """
    self.config = config
    self.auth = auth
    # Default column set; SA_FIELDS is a module-level constant.
    self.columns = SA_FIELDS
    # Populated later when a report is created/requested.
    self.reportId = None
Construct a report factory, providing project and authentication data. This class will track the reportID internally if the request call is used. Args: config, required - see: starthinker/util/configuration.py auth, required - either "user" or "service" used to create and/or read the report. Returns: None
github-repos
def from_config(cls, config, custom_objects=None, columns_by_name=None):
    """Creates a FeatureColumn from its config.

    This method should be the reverse of `get_config`, capable of
    instantiating the same FeatureColumn from the config dictionary.

    Args:
        config: A dict config for this FeatureColumn.
        custom_objects: Optional dictionary mapping names to custom classes
            or functions used during deserialization.
        columns_by_name: Optional map of already-deserialized columns,
            presumably used to avoid duplicates — confirm against
            `_from_config`.

    Returns:
        A FeatureColumn instance.
    """
    restored = cls._from_config(config, custom_objects, columns_by_name)
    return restored
Creates a FeatureColumn from its config. This method should be the reverse of `get_config`, capable of instantiating the same FeatureColumn from the config dictionary. See `get_config` for an example of common (de)serialization practices followed in this file. TODO(b/118939620): This is a private method until consens...
github-repos
def sample_uniform(domain, rng): if isinstance(domain, hp.IntInterval): return rng.randint(domain.min_value, domain.max_value) elif isinstance(domain, hp.RealInterval): return rng.uniform(domain.min_value, domain.max_value) elif isinstance(domain, hp.Discrete): return rng.choice(doma...
Sample a value uniformly from a domain. Args: domain: An `IntInterval`, `RealInterval`, or `Discrete` domain. rng: A `random.Random` object; defaults to the `random` module. Raises: TypeError: If `domain` is not a known kind of domain. IndexError: If the domain is empty.
codesearchnet
def post_process_object_detection(self, outputs, threshold: float=0.5, target_sizes: Union[TensorType, List[Tuple]]=None, top_k: int=100): out_logits, out_bbox = (outputs.logits, outputs.pred_boxes) if target_sizes is not None: if len(out_logits) != len(target_sizes): raise ValueError('Make ...
Converts the raw output of [`DeformableDetrForObjectDetection`] into final bounding boxes in (top_left_x, top_left_y, bottom_right_x, bottom_right_y) format. Only supports PyTorch. Args: outputs ([`DetrObjectDetectionOutput`]): Raw outputs of the model. threshold (`float`, *optional*): Score threshold to keep object d...
github-repos
def notify_progress(self, conn_string, operation, finished, total, wait=True): if (operation not in self.PROGRESS_OPERATIONS): raise ArgumentError('Invalid operation for progress event: {}'.format(operation)) event = dict(operation=operation, finished=finished, total=total) if wait: return s...
Send a progress event. Progress events can be sent for ``debug`` and ``script`` operations and notify the caller about the progress of these potentially long-running operations. They have two integer properties that specify what fraction of the operation has been completed. Args: conn_string (str): The device that i...
codesearchnet
def condition_indices(df):
    """Returns a pandas Series with condition indices of the df columns.

    Args:
        df: pandas DataFrame with columns to run diagnostics on.

    Returns:
        pd.Series: condition index per column, named 'Condition index'.
    """
    eigvals = eigenvalues(df)
    # Condition index = sqrt(largest eigenvalue / each eigenvalue).
    indices = np.sqrt(eigvals.max() / eigvals)
    return pd.Series(indices, df.columns, name='Condition index')
Returns a pandas Series with condition indices of the df columns. Args: df: pandas DataFrame with columns to run diagnostics on
juraj-google-style
def get_user(self, user_id=None, user_name=None): if user_id: endpoint = '/api/user_id/{0}'.format(user_id) elif user_name: endpoint = '/api/user_name/{0}'.format(user_name) else: endpoint = '/api/user' data = self._make_request(verb='GET', endpoint=endpoint) try: ret...
Get a user object from the API. If no ``user_id`` or ``user_name`` is specified, it will return the User object for the currently authenticated user. Args: user_id (int): User ID of the user for whom you want to get information. [Optional] user_name(str): Username for the user for whom you want to get information. [Op...
codesearchnet
def to_json(self, variables=None): variables_to_resolve = [] if variables: for key, value in variables.items(): variables_to_resolve.append(Variable(key, value)) for k in self.get_parameter_definitions(): if not variables or k not in variables: ...
Render the blueprint and return the template in json form. Args: variables (dict): Optional dictionary providing/overriding variable values. Returns: str: the rendered CFN JSON template
juraj-google-style
def post_process_image_text_to_text(self, generated_outputs, skip_special_tokens=True, **kwargs): beginning_of_answer = self.tokenizer.convert_tokens_to_ids(BEGINNING_OF_ANSWER_STRING) unpadded_output_sequences = [seq[(seq == beginning_of_answer).nonzero(as_tuple=True)[0] + 1:] for seq in generated_outputs] ...
Post-processes the output of `FuyuForConditionalGeneration` to only return the text output. Args: generated_outputs (`torch.Tensor` or `np.ndarray`): The output of the model. The output is expected to be a tensor of shape `(batch_size, sequence_length)` containing the token ids of the generated sequences. skip_special...
github-repos
def strip_number(self):
    """The number of the strip that has changed state, 0 being the first.

    On tablets with only one strip, this method always returns 0.

    Returns:
        int: The index of the strip that changed state.

    Raises:
        AttributeError: for events not of type
            ``EventType.TABLET_PAD_STRIP``.
    """
    if self.type == EventType.TABLET_PAD_STRIP:
        return self._libinput.libinput_event_tablet_pad_get_strip_number(
            self._handle)
    raise AttributeError(_wrong_prop.format(self.type))
The number of the strip that has changed state, with 0 being the first strip. On tablets with only one strip, this method always returns 0. For events not of type :attr:`~libinput.constant.EventType.TABLET_PAD_STRIP`, this property raises :exc:`AttributeError`. Returns: int: The index of the strip that changed state...
codesearchnet
def get_dict(self, only_attributes=None, exclude_attributes=None, df_format=False): to_exclude = ['coach_bsites', 'coach_ec', 'coach_go_mf', 'coach_go_bp', 'coach_go_cc'] if (not exclude_attributes): excluder = to_exclude else: excluder = ssbio.utils.force_list(exclude_attributes) ex...
Summarize the I-TASSER run in a dictionary containing modeling results and top predictions from COACH Args: only_attributes (str, list): Attributes that should be returned. If not provided, all are returned. exclude_attributes (str, list): Attributes that should be excluded. df_format (bool): If dictionary values shou...
codesearchnet
def on_test_end(self, logs=None):
    """Called at the end of evaluation or validation.

    Subclasses should override for any actions to run; the base
    implementation is a no-op.

    Args:
        logs: Dict. Currently the output of the last call to
            `on_test_batch_end()` is passed to this argument for this method
            but that may change in the future.
    """
Called at the end of evaluation or validation. Subclasses should override for any actions to run. Args: logs: Dict. Currently the output of the last call to `on_test_batch_end()` is passed to this argument for this method but that may change in the future.
github-repos
def _matmul_3d_with_map_fn(a, b, **kwargs): if isinstance(b, ragged_tensor.RaggedTensor) and (b.ragged_rank == 2 or kwargs.get('transpose_b') or kwargs.get('adjoint_b')): output_ragged_rank = 2 else: output_ragged_rank = 1 def single_batch_matmul(x): out = _matmul_2d(x[0], x[1], **k...
Multiplies batches of 2D matrices using map_fn. `output[n, i, k]` = sum_j (a[n, i, j] * b[n, j, k])` (for all `n`, `i`, `k`). Requires that `a[n, i].nrows()` == `b[n].nrows()` (for all `n` and `i`). Args: a: A 3D Tensor or RaggedTensor with `shape=[B, I, J]`, where dimensions `I` and `J` may be ragged. b: A 3D Tenso...
github-repos
def _sim_timestamps(self, max_rate, bg_rate, emission, i_start, rs, ip_start=0, scale=10, sort=True): counts_chunk = sim_timetrace_bg(emission, max_rate, bg_rate, self.t_step, rs=rs) nrows = emission.shape[0] if (bg_rate is not None): nrows += 1 assert (counts_chunk.shape == (nrows, emission.sha...
Simulate timestamps from emission trajectories. Uses attributes: `.t_step`. Returns: A tuple of two arrays: timestamps and particles.
codesearchnet
async def get_person(self, id_):
    """Retrieve person data by ID.

    Arguments:
        id_ (:py:class:`int`): The person's TMDb ID.

    Returns:
        :py:class:`~.Person`: The requested person.
    """
    # Ask the API to append movie credits to the person payload.
    extra = OrderedDict(append_to_response='movie_credits')
    data = await self._get_person_json(id_, extra)
    image_config = self.config['data'].get('images')
    return Person.from_json(data, image_config)
Retrieve person data by ID. Arguments: id_ (:py:class:`int`): The person's TMDb ID. Returns: :py:class:`~.Person`: The requested person.
juraj-google-style
def _add_deprecation_notice_to_docstring(docstring, message): if docstring: return f'{docstring}\n\n.. deprecated:: {message}' else: return f'.. deprecated:: {message}'
Adds a deprecation notice to a docstring. Args: docstring: The original docstring (can be None or empty). message: The deprecation message to add. Returns: The modified docstring.
github-repos
def convert_code(in_file, out_file, in_alg='taudem', out_alg='arcgis', datatype=None): FileClass.check_file_exists(in_file) in_alg = in_alg.lower() out_alg = out_alg.lower() if in_alg not in FlowModelConst.d8_dirs or out_alg not in FlowModelConst.d8_dirs: raise Runti...
convert D8 flow direction code from one algorithm to another. Args: in_file: input raster file path out_file: output raster file path in_alg: available algorithms are in FlowModelConst.d8_dirs. "taudem" is the default out_alg: same as in_alg. "arcgis" is the default datatype: default is None and use the datatype of the...
juraj-google-style
def parameterized_send(self, request, parameter_list):
    """Send batched requests for a list of parameters.

    Args:
        request (str): Request to send, like "%s.*?\\n"
        parameter_list (list): parameters to format with, like
            ["TTLIN", "TTLOUT"]

    Returns:
        dict: {parameter: response_queue}, in parameter_list order.
    """
    return OrderedDict(
        (parameter, self.send(request % parameter))
        for parameter in parameter_list
    )
Send batched requests for a list of parameters Args: request (str): Request to send, like "%s.*?\n" parameter_list (list): parameters to format with, like ["TTLIN", "TTLOUT"] Returns: dict: {parameter: response_queue}
codesearchnet
def ParseFromHumanReadable(self, string): if (not string): return None match = self.REGEX.match(string.strip().lower()) if (not match): raise DecodeError(('Unknown specification for ByteSize %s' % string)) multiplier = self.DIVIDERS.get(match.group(2)) if (not multiplier): ra...
Parse a human readable string of a byte string. Args: string: The string to parse. Raises: DecodeError: If the string can not be parsed.
codesearchnet
def load_from_file(self, yamlfile, _override=True, _allow_undeclared=False): self._logger.info('Loading configuration from file: %s', yamlfile) try: parsed_yaml = self._modules['yaml'].safe_load(yamlfile.read()) except self._modules['yaml'].YAMLError: self._logger.exception('Problem parsin...
Loads the configuration from a file. Parsed contents must be a single dict mapping config key to value. Args: yamlfile: The opened file object to load configuration from. See load_from_dict() for other args' descriptions. Raises: ConfigurationInvalidError: If configuration file can't be read, or can't be parsed as e...
juraj-google-style
def merge_translations(localization_bundle_path): logging.info('Merging translations') for lang_dir in os.listdir(localization_bundle_path): if (lang_dir == DEFAULT_LANGUAGE_DIRECTORY_NAME): continue for translated_path in glob.glob(os.path.join(localization_bundle_path, lang_dir, ('...
Merges the new translation with the old one. The translated files are saved as '.translated' file, and are merged with old translated file. Args: localization_bundle_path (str): The path to the localization bundle.
codesearchnet
def get_build_tool_version(self): with open(('%s/%s/build.gradle' % (self.path, self.src_folder))) as f: for line in f.readlines(): if ('buildToolsVersion' in line): matches = re.findall('buildToolsVersion \\"(.+?)\\"', line) if (len(matches) == 1): ...
Gets the build tool version to be used by zipalign from build.gradle file. Returns: A string containing the build tool version, default is 23.0.2.
codesearchnet
def oem(self): buf = (ctypes.c_char * self.MAX_BUF_SIZE)() res = self._dll.JLINKARM_GetOEMString(ctypes.byref(buf)) if res != 0: raise errors.JLinkException('Failed to grab OEM string.') oem = ctypes.string_at(buf).decode() if len(oem) == 0: ...
Retrieves and returns the OEM string of the connected J-Link. Args: self (JLink): the ``JLink`` instance Returns: The string of the OEM. If this is an original SEGGER product, then ``None`` is returned instead. Raises: JLinkException: on hardware error.
juraj-google-style
def grid_destroy_from_name(job_name):
    """Destroy all the jobs with a given name.

    Args:
        job_name (str): the job name
    """
    for job in grid_reload_from_name(job_name):
        job.delete()
        logger.info("Killing the job (%s, %s)" % (job.site, job.uid))
Destroy all the jobs with a given name. Args: job_name (str): the job name
juraj-google-style
def _get_endpoint(self, sub_domain): storage_parameters = self._storage_parameters or dict() account_name = storage_parameters.get('account_name') if not account_name: raise ValueError('"account_name" is required for Azure storage') suffix = storage_parameters.get(...
Get endpoint information from storage parameters. Update system with endpoint information and return information required to define roots. Args: self (pycosio._core.io_system.SystemBase subclass): System. sub_domain (str): Azure storage sub-domain. Returns: tuple of str: account_name, endpoint_suffix
juraj-google-style
def query_api_version(self):
    """Return the Web Services API version information from the HMC.

    This operation does not require authentication.

    Returns:
        :term:`json object`: A JSON object with members
        ``api-major-version``, ``api-minor-version``, ``hmc-version`` and
        ``hmc-name``.
    """
    response = self._session.get('/api/version', logon_required=False)
    # Cache the result on the instance before returning it.
    self._api_version = response
    return response
The Query API Version operation returns information about the level of Web Services API supported by the HMC. This operation does not require authentication. Returns: :term:`json object`: A JSON object with members ``api-major-version``, ``api-minor-version``, ``hmc-version`` and ``hmc-name``. For details about thes...
codesearchnet
def GetFileSystemTypeIndicators(cls, path_spec, resolver_context=None): if ((cls._file_system_remainder_list is None) or (cls._file_system_store is None)): (specification_store, remainder_list) = cls._GetSpecificationStore(definitions.FORMAT_CATEGORY_FILE_SYSTEM) cls._file_system_remainder_list = re...
Determines if a file contains a supported file system types. Args: path_spec (PathSpec): path specification. resolver_context (Optional[Context]): resolver context, where None represents the built-in context which is not multi process safe. Returns: list[str]: supported format type indicators.
codesearchnet
def Dump(obj, sort_keys = False, encoder = None): text = json.dumps( obj, indent=2, sort_keys=sort_keys, ensure_ascii=False, cls=encoder, separators=_SEPARATORS) if compatibility.PY2 and isinstance(text, bytes): text = text.deco...
Stringifies a Python object into its JSON representation. Args: obj: A Python object to convert to JSON. sort_keys: If True, output dictionaries keys in sorted (ascending) order. encoder: An (optional) encoder class to use. Returns: A JSON representation of the given object.
juraj-google-style
def _segment_reduce(values, index, segment_reduce_fn, name): flat_index = flatten(index) vector_shape = values.size()[len(index.indices.size()):] flattened_shape = torch.cat([torch.as_tensor([-1], dtype=torch.long), torch.as_tensor(vector_shape, dtype=torch.long)], dim=0) flat_values = values.reshape(fl...
Applies a segment reduction segment-wise. Args: values (`torch.Tensor`): Tensor with segment values. index (`IndexMap`): IndexMap. segment_reduce_fn (`str`): Name for the reduce operation. One of "sum", "mean", "max" or "min". name (`str`): Name for the operation. Currently not used Returns: (`IndexMap`): IndexMap of...
github-repos
def __init__(self, index, port = 8081): self.index = index self.server = None self.port = port if port else find_free_port() self.settings = index.columns self.docs = index.docs self._create_settings() self.html_path = get_cur_path()+'/data/table/' ...
Table Constructor todo::make sure this is memory efficient Args: Index (Index): An Index object with a valid .query method and a .columns attribute. Returns: A table object Usage example >>> Table(ind)
juraj-google-style
def get_by_provider_display_name(self, provider_display_name):
    """Gets a SAN Manager by provider display name.

    Args:
        provider_display_name: Name of the Provider Display Name.

    Returns:
        dict: The first matching SAN Manager, or None if no match.
    """
    for manager in self._client.get_all():
        if manager['providerDisplayName'] == provider_display_name:
            return manager
    return None
Gets a SAN Manager by provider display name. Args: provider_display_name: Name of the Provider Display Name Returns: dict: SAN Manager.
juraj-google-style
def __init__(self, config: JetMoeConfig, layer_idx: Optional[int]=None): super().__init__() self.config = config self.layer_idx = layer_idx self.is_causal = True if layer_idx is None: logger.warning_once(f'Instantiating {self.__class__.__name__} without passing a `layer_idx` is not recommend...
Initialize the JetMoeAttention module. Args: config: Configuration object with model hyperparameters. layer_idx: Index of the layer in the model.
github-repos
def scale_geom_opt_threshold(self, gradient=0.1, displacement=0.1, energy=0.1): if ((gradient < (1.0 / (300 - 1))) or (displacement < (1.0 / (1200 - 1))) or (energy < (1.0 / (100 - 1)))): raise ValueError('The geometry optimization convergence criteria is too tight') self.params['rem']['geom_opt_tol_gra...
Adjust the convergence criteria of geometry optimization. Args: gradient: the scale factor for gradient criteria. If less than 1.0, you are tightening the threshold. The base value is 300 × 10E−6 displacement: the scale factor for atomic displacement. If less then 1.0, you are tightening the threshold. The base value ...
codesearchnet
def _verify_time_range(payload_dict): now = int(time.time()) issued_at = payload_dict.get('iat') if (issued_at is None): raise AppIdentityError('No iat field in token: {0}'.format(payload_dict)) expiration = payload_dict.get('exp') if (expiration is None): raise AppIdentityError('No ...
Verifies the issued at and expiration from a JWT payload. Makes sure the current time (in UTC) falls between the issued at and expiration for the JWT (with some skew allowed for via ``CLOCK_SKEW_SECS``). Args: payload_dict: dict, A dictionary containing a JWT payload. Raises: AppIdentityError: If there is no ``'iat'...
codesearchnet
def _AddFileDescriptor(self, file_desc):
    """Adds a FileDescriptor to the pool, non-recursively.

    If the FileDescriptor contains messages or enums, the caller must
    explicitly register them.

    Args:
        file_desc: A FileDescriptor.

    Raises:
        TypeError: if file_desc is not a descriptor.FileDescriptor.
    """
    if isinstance(file_desc, descriptor.FileDescriptor):
        self._file_descriptors[file_desc.name] = file_desc
    else:
        raise TypeError('Expected instance of descriptor.FileDescriptor.')
Adds a FileDescriptor to the pool, non-recursively. If the FileDescriptor contains messages or enums, the caller must explicitly register them. Args: file_desc: A FileDescriptor.
juraj-google-style
def display(port=None, height=None):
    """Display a TensorBoard instance already running on this machine.

    Args:
        port: The port on which the TensorBoard server is listening, as an
            `int`, or `None` to automatically select the most recently
            launched TensorBoard.
        height: The height of the frame into which to render the TensorBoard
            UI, as an `int` number of pixels.
    """
    _display(port=port, height=height, print_message=True, display_handle=None)
Display a TensorBoard instance already running on this machine. Args: port: The port on which the TensorBoard server is listening, as an `int`, or `None` to automatically select the most recently launched TensorBoard. height: The height of the frame into which to render the TensorBoard UI, as an `int` number of pixels...
codesearchnet
def get_vulnerability_chains(current_node, sink, def_use, chain=[]): for use in def_use[current_node]: if (use == sink): (yield chain) else: vuln_chain = list(chain) vuln_chain.append(use) (yield from get_vulnerability_chains(use, sink, def_use, vuln_c...
Traverses the def-use graph to find all paths from source to sink that cause a vulnerability. Args: current_node() sink() def_use(dict): chain(list(Node)): A path of nodes between source and sink.
codesearchnet
def ParseRecord(self, parser_mediator, key, structure): if key not in ('header', 'header_signature', 'logline'): raise errors.ParseError( 'Unable to parse record, unknown structure: {0:s}'.format(key)) if key == 'logline': self._ParseLogLine(parser_mediator, structure) elif key ...
Parses a log record structure and produces events. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. key (str): identifier of the structure of tokens. structure (pyparsing.ParseResults): structure of tokens derived from a line of a text file....
juraj-google-style
def resize_bytes(fobj, old_size, new_size, offset): if new_size < old_size: delete_size = old_size - new_size delete_at = offset + new_size delete_bytes(fobj, delete_size, delete_at) elif new_size > old_size: insert_size = new_size - old_size insert_at = offset + ol...
Resize an area in a file adding and deleting at the end of it. Does nothing if no resizing is needed. Args: fobj (fileobj) old_size (int): The area starting at offset new_size (int): The new size of the area offset (int): The start of the area Raises: IOError
juraj-google-style
def derive_value(self, value):
    """Derives a new event from this one setting the ``value`` attribute.

    Args:
        value: (any): The value associated with the derived event.

    Returns:
        IonEvent: The newly generated non-thunk event.
    """
    return IonEvent(
        self.event_type,
        self.ion_type,
        value,
        self.field_name,
        self.annotations,
        self.depth,
    )
Derives a new event from this one setting the ``value`` attribute. Args: value: (any): The value associated with the derived event. Returns: IonEvent: The newly generated non-thunk event.
juraj-google-style
def for_default_graph(*args, **kwargs): graph = tf.get_default_graph() collection = graph.get_collection(_BOOKKEEPER) if collection: if (args or kwargs): raise ValueError(('Requesting construction of a BookKeeper that already exists: %s %s' % (args, kwargs))) return collection[0]...
Creates a bookkeeper for the default graph. Args: *args: Arguments to pass into Bookkeeper's constructor. **kwargs: Arguments to pass into Bookkeeper's constructor. Returns: A new Bookkeeper. Raises: ValueError: If args or kwargs are provided and the Bookkeeper already exists.
codesearchnet
def restart(self, container, timeout=10): params = {'t': timeout} url = self._url("/containers/{0}/restart", container) conn_timeout = self.timeout if conn_timeout is not None: conn_timeout += timeout res = self._post(url, params=params, timeout=conn_timeout)...
Restart a container. Similar to the ``docker restart`` command. Args: container (str or dict): The container to restart. If a dict, the ``Id`` key is used. timeout (int): Number of seconds to try to stop for before killing the container. Once killed it will then be restarted. Default is 10 seconds. Raises: :py:class:...
juraj-google-style
def get_enabled_features(self, user_id, attributes=None): enabled_features = [] if not self.is_valid: self.logger.error(enums.Errors.INVALID_DATAFILE.format('get_enabled_features')) return enabled_features if not isinstance(user_id, string_types): self.logger.error(enums.Errors.INVA...
Returns the list of features that are enabled for the user. Args: user_id: ID for user. attributes: Dict representing user attributes. Returns: A list of the keys of the features that are enabled for the user.
juraj-google-style
def nack(self, items):
    """Explicitly deny receipt of messages.

    Sets each message's ack deadline to zero, then drops it.

    Args:
        items(Sequence[NackRequest]): The items to deny.
    """
    mod_acks = [
        requests.ModAckRequest(ack_id=item.ack_id, seconds=0)
        for item in items
    ]
    self.modify_ack_deadline(mod_acks)
    self.drop([requests.DropRequest(*item) for item in items])
Explicitly deny receipt of messages. Args: items(Sequence[NackRequest]): The items to deny.
codesearchnet
def _validate_observation_data(kernel, observation_index_points, observations): ndims = kernel.feature_ndims if (tensorshape_util.is_fully_defined(observation_index_points.shape[:(- ndims)]) and tensorshape_util.is_fully_defined(observations.shape)): index_point_count = observation_index_points.shape[:(...
Ensure that observation data and locations have consistent shapes. This basically means that the batch shapes are broadcastable. We can only ensure this when those shapes are fully statically defined. Args: kernel: The GP kernel. observation_index_points: the observation data locations in the index set. observations...
codesearchnet
def _ParseCommentRecord(self, structure): comment = structure[1] if comment.startswith('Version'): _, _, self._version = comment.partition(':') elif comment.startswith('Software'): _, _, self._software = comment.partition(':') elif comment.startswith('Time'): _, _, time_format = c...
Parse a comment and store appropriate attributes. Args: structure (pyparsing.ParseResults): parsed log line.
juraj-google-style
def pre_release_work(patch: bool=False): default_version = get_version() if patch and default_version.is_devrelease: raise ValueError("Can't create a patch version from the dev branch, checkout a released version!") if default_version.is_devrelease: default_version = default_version.base_ver...
Do all the necessary pre-release steps: - figure out the next minor release version and ask confirmation - update the version everywhere - clean-up the model list in the main README Args: patch (`bool`, *optional*, defaults to `False`): Whether or not this is a patch release.
github-repos
def _process_new(self, feed_item):
    """Creates a new creative asset DCM object from a Bulkdozer feed item.

    This only builds the object to be inserted later by the BaseDAO
    object.

    Args:
        feed_item: Feed item representing the creative asset from the
            Bulkdozer feed.

    Returns:
        A creative asset object ready to be inserted into DCM.
    """
    asset_identifier = {
        'name': feed_item.get(FieldMap.CREATIVE_ASSET_FILE_NAME, None),
        'type': feed_item.get(FieldMap.CREATIVE_TYPE, None),
    }
    return {'assetIdentifier': asset_identifier}
Creates a new creative asset DCM object from a feed item representing a creative asset from the Bulkdozer feed. This function simply creates the object to be inserted later by the BaseDAO object. Args: feed_item: Feed item representing the creative asset from the Bulkdozer feed. Returns: A creative asset object read...
github-repos
def matvec(self, x, adjoint=False, name='matvec'): with self._name_scope(name): block_dimensions = self._block_range_dimensions() if adjoint else self._block_domain_dimensions() if linear_operator_util.arg_is_blockwise(block_dimensions, x, -1): for i, block in enumerate(x): ...
Transform [batch] vector `x` with left multiplication: `x --> Ax`. ```python # Make an operator acting like batch matrix A. Assume A.shape = [..., M, N] operator = LinearOperator(...) X = ... # shape [..., N], batch vector Y = operator.matvec(X) Y.shape ==> [..., M] Y[..., :] = sum_j A[..., :, j] X[..., j] ``` A...
github-repos
def inference(self, observed_arr):
    """Draw samples from the `true` distribution.

    Delegates to the wrapped LSTM model and caches the prediction.

    Args:
        observed_arr: `np.ndarray` of observed data points.

    Returns:
        `np.ndarray` of inferenced data.
    """
    pred_arr = self.__lstm_model.inference(observed_arr)
    self.__pred_arr = pred_arr
    return pred_arr
Draws samples from the `true` distribution. Args: observed_arr: `np.ndarray` of observed data points. Returns: `np.ndarray` of inferenced.
juraj-google-style
def sort_ordered_objects(items, getter=lambda x: x):
    """Sort an iterable of OrderedBase instances by creation order.

    Args:
        items (iterable): the objects to sort.
        getter (callable): a function to extract the OrderedBase instance
            from an object; defaults to the identity.

    Returns:
        list: the objects sorted by their creation counter; objects
        lacking a counter attribute sort first (counter treated as -1).

    Examples:
        >>> sort_ordered_objects([x, y, z])
        >>> sort_ordered_objects(v.items(), getter=lambda e: e[1])
    """
    def creation_order(entry):
        ordered = getter(entry)
        return getattr(ordered, OrderedBase.CREATION_COUNTER_FIELD, -1)

    return sorted(items, key=creation_order)
Sort an iterable of OrderedBase instances. Args: items (iterable): the objects to sort getter (callable or None): a function to extract the OrderedBase instance from an object. Examples: >>> sort_ordered_objects([x, y, z]) >>> sort_ordered_objects(v.items(), getter=lambda e: e[1])
codesearchnet
def get_min_max_value(statistics: calib_stats_pb2.CalibrationStatistics, calib_opts: stablehlo_quant_config_pb2.CalibrationOptions) -> tuple[float, float]: calib_method = calib_opts.calibration_method if calib_method not in _REGISTRY: raise ValueError(f'Unsupported calibration method: {calib_method}') ...
Calculates min and max from statistics using calibration options. Args: statistics: Collected calibration statistics. calib_opts: Calibration options used for calculating min and max. Returns: (min_value, max_value): Min and max calculated using calib_opts. Raises: ValueError: Unsupported calibration method is given...
github-repos
def get_service_state_object_id(subsystem: str, name: str, version: str) -> str:
    """Return the service state data object key.

    Args:
        subsystem (str): Subsystem the service belongs to.
        name (str): Name of the service.
        version (str): Version of the service.

    Returns:
        str: key used to store the service state data object, in the
        form ``subsystem:name:version``.
    """
    return ':'.join((subsystem, name, version))
Return service state data object key. Args: subsystem (str): Subsystem the service belongs to name (str): Name of the Service version (str): Version of the Service Returns: str, Key used to store the service state data object
juraj-google-style
def _batch_accumulator(cls, primals, tangents): acc = super(ForwardAccumulator, cls).__new__(cls, primals, tangents) acc._recording = False acc._accumulator = pywrap_tfe.TFE_Py_ForwardAccumulatorNew(True) primal_ids = set() for primal, tangent in zip(nest.flatten(primals), nest.flatten(tangents)): ...
Factory constructor to test accumulator on batches of tangents. Args: primals: A tensor or nested structure of tensors to watch. tangents: A tensor or nested structure of tensors, with the same nesting structure as `primals`, with each element being a vector with compatible shape `[None] + primal.shape` of the corresp...
github-repos
def get_configuration_file(configuration_files: list[str]) -> str: configuration_files_map = {} for file_name in configuration_files: if file_name.startswith('config.') and file_name.endswith('.json') and (file_name != 'config.json'): v = file_name.removeprefix('config.').removesuffix('.json...
Get the configuration file to use for this version of transformers. Args: configuration_files (`List[str]`): The list of available configuration files. Returns: `str`: The configuration file to use.
github-repos
class ZoeDepthReassembleStage(nn.Module): def __init__(self, config): super().__init__() self.readout_type = config.readout_type self.layers = nn.ModuleList() for neck_hidden_size, factor in zip(config.neck_hidden_sizes, config.reassemble_factors): self.layers.append(Zoe...
This class reassembles the hidden states of the backbone into image-like feature representations at various resolutions. This happens in 3 stages: 1. Map the N + 1 tokens to a set of N tokens, by taking into account the readout ([CLS]) token according to `config.readout_type`. 2. Project the channel dimension of the h...
github-repos
def get(self, uid: int) -> Optional[CachedMessage]:
    """Return the given cached message, or ``None`` if absent.

    Args:
        uid: The message UID.
    """
    cached = self._cache.get(uid)
    return cached
Return the given cached message. Args: uid: The message UID.
codesearchnet
def GetCampaignFeeds(client, feed, placeholder_type): campaign_feed_service = client.GetService('CampaignFeedService', 'v201809') campaign_feeds = [] more_pages = True selector = { 'fields': ['CampaignId', 'MatchingFunction', 'PlaceholderTypes'], 'predicates': [ { 'field...
Get a list of Feed Item Ids used by a campaign via a given Campaign Feed. Args: client: an AdWordsClient instance. feed: a Campaign Feed. placeholder_type: the Placeholder Type. Returns: A list of Feed Item Ids.
juraj-google-style
def NHWCToNCHW(input_tensor):
    """Convert the input from NHWC format to NCHW.

    Args:
        input_tensor: a 4-D tensor, or a 4-element array representing the
            same shape.

    Returns:
        the converted tensor, or a shape array reordered to NCHW.
    """
    if isinstance(input_tensor, tensor.Tensor):
        return array_ops.transpose(input_tensor, [0, 3, 1, 2])
    batch, height, width, channels = input_tensor
    return [batch, channels, height, width]
Convert the input from NHWC format to NCHW. Args: input_tensor: a 4-D tensor, or a 4-element array representing the same. Returns: the converted tensor or a shape array
github-repos
def direct_transformers_import(path: str, file='__init__.py') -> ModuleType: name = 'transformers' location = os.path.join(path, file) spec = importlib.util.spec_from_file_location(name, location, submodule_search_locations=[path]) module = importlib.util.module_from_spec(spec) spec.loader.exec_modu...
Imports transformers directly Args: path (`str`): The path to the source file file (`str`, *optional*): The file to join with the path. Defaults to "__init__.py". Returns: `ModuleType`: The resulting imported module
github-repos
def AddRow(self, values):
    """Adds a row of values.

    The first row added fixes the expected number of columns for all
    subsequent rows.

    Args:
        values (list[object]): values.

    Raises:
        ValueError: if the number of values is out of bounds.
    """
    expected_columns = self._number_of_columns
    if expected_columns and len(values) != expected_columns:
        raise ValueError('Number of values is out of bounds.')

    self._rows.append(values)
    if not expected_columns:
        self._number_of_columns = len(values)
Adds a row of values. Args: values (list[object]): values. Raises: ValueError: if the number of values is out of bounds.
juraj-google-style
def shift(x, offset, dim, wrap, name=None):
    """Shift operation: shift x right by +offset in dimension dim.

    Args:
        x: a Tensor.
        offset: an integer. If negative, shift left instead of right.
        dim: a Dimension of x.
        wrap: a boolean - whether to wrap (True) or pad with zeros (False).
        name: an optional string.

    Returns:
        a Tensor with the same shape and dtype as x.
    """
    operation = ShiftOperation(x, offset, dim, wrap, name=name)
    return operation.outputs[0]
Shift operation. Shift x right by +offset in dimension dim. Args: x: a Tensor offset: an integer. If negative, shift left instead of right. dim: a Dimension of x wrap: a boolean - whether to wrap (True) or pad with zeros (False). name: an optional string Returns: a Tensor with the same shape and dtype as x
juraj-google-style
def go_from(self, vertex):
    """Tell the edge to go out from this vertex.

    Detaches the edge from any previously attached outgoing vertex
    before attaching it to the new one.

    Args:
        vertex (Vertex): vertex to go from.
    """
    previous_vertex = self.vertex_out
    if previous_vertex:
        previous_vertex.edges_out.remove(self)
    self.vertex_out = vertex
    vertex.edges_out.add(self)
Tell the edge to go out from this vertex. Args: vertex (Vertex): vertex to go from.
codesearchnet
def get_member_information(self, query_params=None):
    """Get information for a member.

    Args:
        query_params (dict): optional query parameters for the request.

    Returns:
        dict: the member information.
    """
    params = query_params or {}
    return self.fetch_json(uri_path=self.base_uri, query_params=params)
Get Information for a member. Returns a dictionary of values. Returns: dict
codesearchnet
def create(self, vrf_name, rd=None):
    """Creates a new VRF resource.

    Note:
        A valid RD has the format admin_ID:local_assignment, where the
        admin_ID can be an AS number or globally assigned IPv4 address.

    Args:
        vrf_name: name of the VRF definition to create.
        rd: optional route distinguisher to assign to the VRF.

    Returns:
        The result of applying the configuration commands.
    """
    commands = ['vrf definition %s' % vrf_name]
    if rd:
        commands.append('rd %s' % rd)
    return self.configure(commands)
Creates a new VRF resource Note: A valid RD has the following format admin_ID:local_assignment. The admin_ID can be an AS number or globally assigned IPv4 address. The local_assignment can be an integer between 0-65,535 if the admin_ID is an IPv4 address and can be between 0-4,294,967,295 if the admin_ID is an AS numb...
codesearchnet
def _compute_causal_mask(self, query, value=None): q_seq_length = ops.shape(query)[1] v_seq_length = q_seq_length if value is None else ops.shape(value)[1] ones_mask = ops.ones((1, q_seq_length, v_seq_length), dtype='int32') row_index = ops.cumsum(ones_mask, axis=-2) col_index = ops.cumsum(ones_mask...
Computes a causal mask (e.g., for masked self-attention layers). For example, if query and value both contain sequences of length 4, this function returns a boolean tensor equal to: ``` [[[True, False, False, False], [True, True, False, False], [True, True, True, False], [True, True, True, True]]] ``` Args:...
github-repos
def compose(*funcs):
    """Chained function composition wrapper.

    Creates a function f where f(x) = arg0(arg1(arg2(...argN(x)))).
    If *funcs is empty, an identity function is returned (it yields its
    first argument, or None when called with no arguments).

    Args:
        *funcs: list of functions to chain.

    Returns:
        a new function composed of chained calls to *funcs.
    """
    if not funcs:
        return lambda *args: args[0] if args else None
    if len(funcs) == 1:
        return funcs[0]

    def composed(*args):
        # The right-most function receives the original arguments; the
        # remaining functions are applied right-to-left on its result.
        result = funcs[-1](*args)
        for fn in funcs[-2::-1]:
            result = fn(result)
        return result

    return composed
chained function composition wrapper creates function f, where f(x) = arg0(arg1(arg2(...argN(x)))) if *funcs is empty, an identity function is returned. Args: *funcs: list of functions to chain Returns: a new function composed of chained calls to *args
juraj-google-style
def GetAttributeContainerByIndex(self, index):
    """Retrieves a specific serialized attribute container from the list.

    Args:
        index (int): attribute container index.

    Returns:
        bytes: serialized attribute container data or None if not
            available.

    Raises:
        IndexError: if the index is less than zero.
    """
    if index < 0:
        raise IndexError(
            'Unsupported negative index value: {0:d}.'.format(index))
    if index >= len(self._list):
        return None
    return self._list[index]
Retrieves a specific serialized attribute container from the list. Args: index (int): attribute container index. Returns: bytes: serialized attribute container data or None if not available. Raises: IndexError: if the index is less than zero.
juraj-google-style
def Parse(self, raw_data):
    """Take the results and yield results that passed through the filters.

    The output of each filter is used as the input for successive
    filters.

    Args:
        raw_data: An iterable series of rdf values.

    Returns:
        A list of rdf values that matched all filters.
    """
    self.results = raw_data
    for result_filter in self.filters:
        self.results = result_filter.Parse(self.results)
    return self.results
Take the results and yield results that passed through the filters. The output of each filter is used as the input for successive filters. Args: raw_data: An iterable series of rdf values. Returns: A list of rdf values that matched all filters.
codesearchnet
def launch_external_file(filename: str, raise_if_fails: bool = False) -> None: log.info("Launching external file: {!r}", filename) try: if sys.platform.startswith('linux'): cmdargs = ["xdg-open", filename] subprocess.call(cmdargs) else: ...
Launches a file using the operating system's standard launcher. Args: filename: file to launch raise_if_fails: raise any exceptions from ``subprocess.call(["xdg-open", filename])`` (Linux) or ``os.startfile(filename)`` (otherwise)? If not, exceptions are suppressed.
juraj-google-style
def update_unexpected_keys(self, model, unexpected_keys: List[str], prefix: str) -> List[str]:
    """Hook for subclasses to adjust the list of unexpected keys.

    The base implementation returns the keys unchanged.

    Args:
        model: the model whose state dict was compared to the checkpoint.
        unexpected_keys (`List[str]`): the list of unexpected keys in the
            checkpoint compared to the state dict of the model.
        prefix (`str`): prefix of the model's state dict keys.

    Returns:
        `List[str]`: the (possibly adjusted) unexpected keys.
    """
    return unexpected_keys
Override this method if you want to adjust the `unexpected_keys`. Args: unexpected_keys (`List[str]`, *optional*): The list of unexpected keys in the checkpoint compared to the state dict of the model
github-repos
def list_media_endpoint_keys(access_token, subscription_id, rgname, msname): endpoint = ''.join([get_rm_endpoint(), '/subscriptions/', subscription_id, '/resourceGroups/', rgname, '/providers/microsoft.media/', '/mediaservices/', msname, '/listKeys?api-version=', MEDIA_API]) return do_get(endpoint, access_token...
list the media endpoint keys in a media service Args: access_token (str): A valid Azure authentication token. subscription_id (str): Azure subscription id. rgname (str): Azure resource group name. msname (str): Media service name. Returns: HTTP response. JSON body.
codesearchnet
def get_load_balancer(self, id):
    """Returns a Load Balancer object by its ID.

    Args:
        id (str): Load Balancer ID.
    """
    load_balancer = LoadBalancer.get_object(api_token=self.token, id=id)
    return load_balancer
Returns a Load Balancer object by its ID. Args: id (str): Load Balancer ID
juraj-google-style
def AdManagerDateTimePacker(cls, value, version): if isinstance(value, datetime.datetime): if value.tzinfo is None: raise googleads.errors.GoogleAdsValueError( 'Datetime %s is not timezone aware.' % value ) return { 'date': cls.AdManagerDateTimePacker(value.da...
Returns dicts formatted for Ad Manager SOAP based on date/datetime. Args: value: A date or datetime object to be converted. version: the version of the current API, e.g. 'v201811' Returns: The value object correctly represented for Ad Manager SOAP.
juraj-google-style
def optimal_partitions(sizes, counts, num_part): if (num_part < 2): return [(sizes[0], sizes[(- 1)])] if (num_part >= len(sizes)): partitions = [(x, x) for x in sizes] return partitions nfps = _compute_nfps_real(counts, sizes) (partitions, _, _) = _compute_best_partitions(num_par...
Compute the optimal partitions given a distribution of set sizes. Args: sizes (numpy.array): The complete domain of set sizes in ascending order. counts (numpy.array): The frequencies of all set sizes in the same order as `sizes`. num_part (int): The number of partitions to create. Returns: list: A list of partitions...
codesearchnet
def get(self, block=True, timeout=None): if (not block): (success, item) = ray.get(self.actor.get.remote()) if (not success): raise Empty elif (timeout is None): (success, item) = ray.get(self.actor.get.remote()) while (not success): (success, item) = ray....
Gets an item from the queue. Uses polling if block=True, so there is no guarantee of order if multiple consumers get from the same empty queue. Returns: The next item in the queue. Raises: Empty if the queue is empty and blocking is False.
codesearchnet
def _restructure_if_volume_follows_journal(left, right): def _get_volume_keyword_op_and_remaining_subtree(right_subtree): if (isinstance(right_subtree, NotOp) and isinstance(right_subtree.op, KeywordOp) and (right_subtree.op.left == Keyword('volume'))): return (None, None) elif (isinsta...
Remove volume node if it follows a journal logically in the tree hierarchy. Args: left (ast.ASTElement): The journal KeywordOp node. right (ast.ASTElement): The rest of the tree to be restructured. Return: (ast.ASTElement): The restructured tree, with the volume node removed. Notes: This happens to support queries l...
codesearchnet
def _UpdateStatus( self, status, display_name, number_of_consumed_sources, storage_writer, force=False): current_timestamp = time.time() if not force and current_timestamp < ( self._last_status_update_timestamp + self._STATUS_UPDATE_INTERVAL): return if status == definitions....
Updates the processing status. Args: status (str): human readable status of the processing e.g. 'Idle'. display_name (str): human readable of the file entry currently being processed. number_of_consumed_sources (int): number of consumed sources. storage_writer (StorageWriter): storage writer for a session storage. for...
juraj-google-style
def diet_expert(x, hidden_size, params): @fn_with_diet_vars(params) def diet_expert_internal(x): dim = x.get_shape().as_list()[(- 1)] h = tf.layers.dense(x, hidden_size, activation=tf.nn.relu, use_bias=False) y = tf.layers.dense(h, dim, use_bias=False) y *= tf.rsqrt(tf.to_float(...
A two-layer feed-forward network with relu activation on hidden layer. Uses diet variables. Recomputes hidden layer on backprop to save activation memory. Args: x: a Tensor with shape [batch, io_size] hidden_size: an integer params: a diet variable HParams object. Returns: a Tensor with shape [batch, io_size]
codesearchnet
def unload(self): unloaded = False if self._lib is not None: if self._winlib is not None: ctypes.windll.kernel32.FreeLibrary.argtypes = ( ctypes.c_void_p, ) ...
Unloads the library's DLL if it has been loaded. This additionally cleans up the temporary DLL file that was created when the library was loaded. Args: self (Library): the ``Library`` instance Returns: ``True`` if the DLL was unloaded, otherwise ``False``.
juraj-google-style
def render(self, fname=''): import qnet.visualization.circuit_pyx as circuit_visualization from tempfile import gettempdir from time import time, sleep if (not fname): tmp_dir = gettempdir() fname = os.path.join(tmp_dir, 'tmp_{}.png'.format(hash(time))) if circuit_visualization.draw_...
Render the circuit expression and store the result in a file Args: fname (str): Path to an image file to store the result in. Returns: str: The path to the image file
codesearchnet