code
stringlengths
20
4.93k
docstring
stringlengths
33
1.27k
source
stringclasses
3 values
def match_row_splits_dtypes(*tensors, **kwargs): return_dtype = kwargs.pop('return_dtype', False) if kwargs: raise ValueError(f'Unexpected keyword args {kwargs}.') has_int32 = False has_int64 = False for tensor in tensors: if isinstance(tensor, RaggedTensor): if tensor.ro...
Return a copy of `tensors` with row_splits all having the same dtype. Args: *tensors: A list of Tensors or RaggedTensors. **kwargs: If 'return_dtype=True', then return a tuple (dtype, tensors), where `dtype` is the data type used by row-splits, and `tensors` is the converted list of `Tensors` and `RaggedTensors`. Ret...
github-repos
async def delCronJob(self, iden):
    """Delete a cron job.

    Args:
        iden (bytes): The iden of the cron job to be deleted.

    Raises:
        s_exc.NoSuchIden: If no cron job exists for the given iden.
    """
    appt = self.cell.agenda.appts.get(iden)
    if appt is None:
        raise s_exc.NoSuchIden()
    # Ensure the caller is allowed to modify this user's triggers/jobs.
    self._trig_auth_check(appt.useriden)
    await self.cell.agenda.delete(iden)
Delete a cron job. Args: iden (bytes): The iden of the cron job to be deleted.
juraj-google-style
def _StopExtractionProcesses(self, abort=False): logger.debug('Stopping extraction processes.') self._StopMonitoringProcesses() if abort: self._AbortTerminate() logger.debug('Emptying task queue.') self._task_queue.Empty() for _ in self._processes_per_pid: try: self._...
Stops the extraction processes. Args: abort (bool): True to indicate the stop is issued on abort.
codesearchnet
def __init__(self, asset_id, amount):
    """Create an instance.

    Args:
        asset_id (UInt256): identifier of the asset.
        amount (Fixed8): amount of the asset.
    """
    # Store the raw values on the conventionally-capitalized attributes
    # used elsewhere in this codebase.
    self.AssetId = asset_id
    self.Amount = amount
Create an instance. Args: asset_id (UInt256): amount (Fixed8):
juraj-google-style
def _print_test_names_for_suite(suite_class): config = config_parser.TestRunConfig() runner = test_runner.TestRunner(log_dir=config.log_path, testbed_name=config.testbed_name) cls = suite_class(runner, config) try: cls.setup_suite(config) finally: cls.teardown_suite() last = '' ...
Prints the names of all the tests in a suite class. Args: suite_class: a test suite class to be run.
github-repos
def sync_main(async_main, config_path=None, default_config=None, should_validate_task=True, loop_function=asyncio.get_event_loop): context = _init_context(config_path, default_config) _init_logging(context) if should_validate_task: validate_task_schema(context) loop = loop_function() loop.ru...
Entry point for scripts using scriptworker. This function sets up the basic needs for a script to run. More specifically: * it creates the scriptworker context and initializes it with the provided config * the path to the config file is either taken from `config_path` or from `sys.argv[1]`. * it verifies `sys.argv` do...
codesearchnet
def _prepare_headers(self, additional_headers=None, **kwargs):
    """Prepare headers for HTTP communication.

    Builds the default header dict (a pyseaweed User-Agent) and merges in
    any caller-supplied headers.

    Args:
        additional_headers (dict, optional): extra headers to be used with
            the request; they override the defaults on key collision.
        **kwargs: accepted for interface compatibility; currently unused.

    Returns:
        dict: headers to use in requests; keys and values are strings.
    """
    headers = {
        'User-Agent': 'pyseaweed/{version}'.format(version=__version__),
    }
    if additional_headers is not None:
        headers.update(additional_headers)
    return headers
Prepare headers for http communication. Return dict of header to be used in requests. Args: .. versionadded:: 0.3.2 **additional_headers**: (optional) Additional headers to be used with request Returns: Headers dict. Key and values are string
codesearchnet
def fillup_layer(layer_length, arrow_char):
    """Create a layer filled with BreakWire elements.

    Args:
        layer_length (int): The length of the layer to create.
        arrow_char (char): The char used to create each BreakWire element.

    Returns:
        list: The new layer of BreakWire elements.
    """
    return [BreakWire(arrow_char) for _ in range(layer_length)]
Creates a layer with BreakWire elements. Args: layer_length (int): The length of the layer to create arrow_char (char): The char used to create the BreakWire element. Returns: list: The new layer.
juraj-google-style
def sort_index(self, **kwargs): axis = kwargs.pop('axis', 0) index = (self.columns if axis else self.index) ascending = kwargs.pop('ascending', True) if (ascending is None): ascending = False kwargs['ascending'] = ascending def sort_index_builder(df, **kwargs): if axis: ...
Sorts the data with respect to either the columns or the indices. Returns: DataManager containing the data sorted by columns or indices.
codesearchnet
def copy_r(src, dst): abssrc = os.path.abspath(src) absdst = os.path.abspath(dst) try: os.makedirs(absdst) except OSError: pass for f in os.listdir(abssrc): fpath = os.path.join(abssrc, f) if os.path.isfile(fpath): shutil.copy(fpath, absdst)...
Implements a recursive copy function similar to Unix's "cp -r" command. Surprisingly, python does not have a real equivalent. shutil.copytree only works if the destination directory is not present. Args: src (str): Source folder to copy. dst (str): Destination folder.
juraj-google-style
def apply(self, window_length, samples=True, func1d=None):
    """Run a function over a rolling window of the curve.

    Args:
        window_length (int): the window length. Required.
        samples (bool): window length is in samples. Use False for a window
            length given in the curve's depth units (it is then divided by
            ``self.step``).
        func1d (function): a function that takes a 1D array and returns a
            scalar. Default: ``np.mean``.

    Returns:
        Curve: a new curve built from the windowed result, carrying over
        this curve's parameters.
    """
    if func1d is None:
        func1d = np.mean
    # Convert a metre-based length into a sample count when requested.
    length = window_length / (1 if samples else self.step)
    params = self.__dict__.copy()
    data = self._rolling_window(int(length), func1d)
    return Curve(data, params=params)
Runs any kind of function over a window. Args: window_length (int): the window length. Required. samples (bool): window length is in samples. Use False for a window length given in metres. func1d (function): a function that takes a 1D array and returns a scalar. Default: ``np.mean()``. Returns: Curve.
codesearchnet
def intrusion_sets(self, name, owner=None, **kwargs):
    """Create an Intrusion Set TI object.

    Args:
        name: name of the Intrusion Set.
        owner: owner of the Intrusion Set.
        **kwargs: additional arguments forwarded to ``IntrusionSet``.

    Returns:
        An ``IntrusionSet`` instance bound to this client's tcex.
    """
    return IntrusionSet(self.tcex, name, owner=owner, **kwargs)
Create the Intrusion Set TI object. Args: owner: name: **kwargs: Return:
juraj-google-style
def stop_tuning_job(self, name): try: LOGGER.info('Stopping tuning job: {}'.format(name)) self.sagemaker_client.stop_hyper_parameter_tuning_job(HyperParameterTuningJobName=name) except ClientError as e: error_code = e.response['Error']['Code'] if (error_code == 'ValidationExcepti...
Stop the Amazon SageMaker hyperparameter tuning job with the specified name. Args: name (str): Name of the Amazon SageMaker hyperparameter tuning job. Raises: ClientError: If an error occurs while trying to stop the hyperparameter tuning job.
codesearchnet
def get_compound_bodies(node): if isinstance(node, (ast.Module, ast.FunctionDef, ast.ClassDef, ast.With)): return [node.body] elif isinstance(node, (ast.If, ast.While, ast.For)): return [node.body, node.orelse] elif (PY2 and isinstance(node, ast.TryFinally)): return [node.body, node....
Returns a list of bodies of a compound statement node. Args: node: AST node. Returns: A list of bodies of the node. If the given node does not represent a compound statement, an empty list is returned.
codesearchnet
def replace_batch_norm(model): for name, module in model.named_children(): if isinstance(module, nn.BatchNorm2d): new_module = ConditionalDetrFrozenBatchNorm2d(module.num_features) if not module.weight.device == torch.device('meta'): new_module.weight.data.copy_(modul...
Recursively replace all `torch.nn.BatchNorm2d` with `ConditionalDetrFrozenBatchNorm2d`. Args: model (torch.nn.Module): input model
github-repos
def delete(workflow_id: str = None, workflow_version: str = None): if workflow_id is None and workflow_version is None: keys = DB.get_keys("workflow_definitions:*") DB.delete(*keys) elif workflow_id is not None and workflow_version is None: keys = DB.get_keys("workflow_definitions:{...
Delete workflow definitions. Args: workflow_id (str, optional): Optional workflow identifier workflow_version (str, optional): Optional workflow identifier version If workflow_id and workflow_version are None, delete all workflow definitions.
juraj-google-style
def broadcast_dynamic_shape(shape_x: dynamic_ragged_shape.DenseOrRaggedShape, shape_y: dynamic_ragged_shape.DenseOrRaggedShape) -> dynamic_ragged_shape.DynamicRaggedShape: if not isinstance(shape_x, dynamic_ragged_shape.DynamicRaggedShape): shape_x = dynamic_ragged_shape.DynamicRaggedShape([], shape_x) ...
Returns the shape formed by broadcasting two shapes to be compatible. 1. If shape_x and shape_y both have row_partitions, then fail if their dtypes don't match. 2. If neither has row_partitions and they have different dtypes, go with int64. 3. If one has row_partitions, go with that dtype. Args: shape_x: A `DynamicRa...
github-repos
def append_with_data(url, data): if data is None: return url url_parts = list(urlparse(url)) query = OrderedDict(parse_qsl(url_parts[4], keep_blank_values=True)) query.update(data) url_parts[4] = URLHelper.query_dict_to_string(query) return urlun...
Append the given URL with the given data OrderedDict. Args: url (str): The URL to append. data (obj): The key value OrderedDict to append to the URL. Returns: str: The new URL.
juraj-google-style
def info(self): result = list() result.append('Agents:\n') for agent in self._all_agents: result.append('\tName: ') result.append(agent.name) result.append('\n\tType: ') result.append(type(agent).__name__) result.append('\n\t') result.append('Sensors:\n') ...
Returns a string with specific information about the environment. This information includes which agents are in the environment and which sensors they have. Returns: str: The information in a string format.
codesearchnet
def save_results(self, output_dir='.', prefix='', prefix_sep='_', image_list=None): if prefix == '': prefix_sep = '' if not exists(output_dir): makedirs(output_dir) logger.debug("Saving results...") if image_list is None: ...
Write out any images generated by the meta-analysis. Args: output_dir (str): folder to write images to prefix (str): all image files will be prepended with this string prefix_sep (str): glue between the prefix and rest of filename image_list (list): optional list of images to save--e.g., ['pFgA_z', 'pAgF']. If image_li...
juraj-google-style
def get_meta_graph_def_from_tags(self, tags): found_match = False meta_graph_def_to_load = None available_tags = [] for meta_graph_def in self._saved_model.meta_graphs: available_tags.append(set(meta_graph_def.meta_info_def.tags)) if set(meta_graph_def.meta_info_def.tags) == set(tags): ...
Return MetaGraphDef with the exact specified tags. Args: tags: A list or set of string tags that identify the MetaGraphDef. Returns: MetaGraphDef with the same tags. Raises: RuntimeError: if no metagraphs were found with the associated tags.
github-repos
def stream( self, accountID, **kwargs ): request = Request( 'GET', '/v3/accounts/{accountID}/transactions/stream' ) request.set_path_param( 'accountID', accountID ) request.set_stream(True) ...
Get a stream of Transactions for an Account starting from when the request is made. Args: accountID: Account Identifier Returns: v20.response.Response containing the results from submitting the request
juraj-google-style
def FromDictionary(cls, dictionary): if 'user_id' in dictionary: raise errors.GitkitClientError('use localId instead') if 'localId' not in dictionary: raise errors.GitkitClientError('must specify localId') if 'email' not in dictionary: raise errors.GitkitClientError('must specify emai...
Initializes from user specified dictionary. Args: dictionary: dict of user specified attributes Returns: GitkitUser object
juraj-google-style
def _convert_keras_to_saved_model(self, output_dir): try: def _is_keras_3(): try: import keras return keras.__version__.startswith('3') and isinstance(self._keras_model, keras.layers.Layer) except ImportError: return False...
Save Keras model to the SavedModel format. Args: output_dir: The output directory to save the SavedModel. Returns: graph_def: The frozen GraphDef. input_tensors: List of input tensors. output_tensors: List of output tensors.
github-repos
def _to_tf_type(dtype):
    """Converts a native python or numpy type to TF DType.

    Args:
        dtype: Could be a python type, a numpy type or a TF DType.

    Returns:
        A tensorflow `DType`.
    """
    return dtypes.as_dtype(dtype)
Converts a native python or numpy type to TF DType. Args: dtype: Could be a python type, a numpy type or a TF DType. Returns: A tensorflow `DType`.
github-repos
def transform(self, args): if self.parse_error(): AliasManager.write_alias_config_hash(empty_hash=True) return args if self.detect_alias_config_change(): self.load_full_command_table() self.collided_alias = AliasManager.build_collision_table(self.alias_table.sections()) b...
Transform any aliases in args to their respective commands. Args: args: A list of space-delimited command input extracted directly from the console. Returns: A list of transformed commands according to the alias configuration file.
codesearchnet
def validate(datapackage, schema='base'): errors = [] schema_obj = None datapackage_obj = None if isinstance(datapackage, six.string_types): try: datapackage_obj = json.loads(datapackage) except ValueError as e: errors.append(DataPackageValidateException(e)) e...
Validate Data Package datapackage.json files against a jsonschema. Args: datapackage (str or dict): The Data Package descriptor file (i.e. datapackage.json) as a dict or its contents in a string. schema (str or dict): If a string, it can be the schema ID in the registry, a local path, a URL or the schema's JSON as a s...
codesearchnet
def register_multi_flags_validator(flag_names, multi_flags_checker, message='Flags validation failed', flag_values=FLAGS):
    """Adds a constraint to multiple flags.

    The constraint is validated when flags are initially parsed, and after
    each change of the corresponding flags' values.

    Args:
        flag_names: [str], a list of the flag names to be checked.
        multi_flags_checker: callable, a function to validate the flags.
        message (str): error message template used on validation failure.
        flag_values: the FlagValues instance to register the validator on.
    """
    validator = gflags_validators.MultiFlagsValidator(
        flag_names, multi_flags_checker, message)
    _add_validator(flag_values, validator)
Adds a constraint to multiple flags. The constraint is validated when flags are initially parsed, and after each change of the corresponding flag's value. Args: flag_names: [str], a list of the flag names to be checked. multi_flags_checker: callable, a function to validate the flag. input - dictionary, with keys() be...
codesearchnet
async def check_status(self, pipeline_uuid: str) -> api_pb2.Status:
    """Get the status of the pipeline identified by its uuid.

    Args:
        pipeline_uuid: uuid of the pipeline.

    Returns:
        The status of the pipeline.
    """
    self._verify_pipeline_uuid(pipeline_uuid)
    status_request = api_pb2.CheckStatusRequest(pipeline_uuid=pipeline_uuid)
    reply = await self._stub.CheckStatus(status_request, **self._kwargs)
    return reply.status
Get the status of the pipeline by its uuid. Args: pipeline_uuid: uuid of the pipeline Returns: status: status of the pipeline
github-repos
def task_done(self, message): topic_partition = (message.topic, message.partition) if topic_partition not in self._topics: logger.warning('Unrecognized topic/partition in task_done message: ' '{0}:{1}'.format(*topic_partition)) return False ...
Mark a fetched message as consumed. Offsets for messages marked as "task_done" will be stored back to the kafka cluster for this consumer group on commit() Arguments: message (KafkaMessage): the message to mark as complete Returns: True, unless the topic-partition for this message has not been configured for the con...
juraj-google-style
def _download(self):
    """Download NApp package from server.

    Returns:
        str: Downloaded temp filename.

    Raises:
        urllib.error.HTTPError: If download is not successful.
    """
    repo = self._config.get('napps', 'repo')
    package = '{}/{}-{}.napp'.format(self.user, self.napp, self.version)
    # urlretrieve returns (filename, headers); only the filename is needed.
    return urllib.request.urlretrieve(os.path.join(repo, package))[0]
Download NApp package from server. Return: str: Downloaded temp filename. Raises: urllib.error.HTTPError: If download is not successful.
codesearchnet
def _CreateStyleForRoute(self, doc, route): style_id = 'route_%s' % route.route_id style = ET.SubElement(doc, 'Style', {'id': style_id}) linestyle = ET.SubElement(style, 'LineStyle') width = ET.SubElement(linestyle, 'width') type_to_width = {0: '3', 1: '3', ...
Create a KML Style element for the route. The style sets the line colour if the route colour is specified. The line thickness is set depending on the vehicle type. Args: doc: The KML Document ElementTree.Element instance. route: The transitfeed.Route to create the style for. Returns: The id of the style as a string.
juraj-google-style
def parent_callback(self, executor_fu): with self._update_lock: if (not executor_fu.done()): raise ValueError('done callback called, despite future not reporting itself as done') if (executor_fu != self.parent): if ((executor_fu.exception() is None) and (not isinstance(execut...
Callback from a parent future to update the AppFuture. Used internally by AppFuture, and should not be called by code using AppFuture. Args: - executor_fu (Future): Future returned by the executor along with callback. This may not be the current parent future, as the parent future may have already been updated to poi...
codesearchnet
def get_content_of_file(self, name, full_path=False): if self.handle: for member in self.handle.getmembers(): if (full_path and member.name == name) or ( not full_path and os.path.basename( member.name) == name): ...
Returns content of file from archive. If full_path is set to False and two files with given name exist, content of one is returned (it is not specified which one that is). If set to True, returns content of exactly that file. Args: name: name of the file to get content of Returns: Content of the file with given name ...
juraj-google-style
def _NormalizedVolumeIdentifiers(self, volume_system, volume_identifiers, prefix='v'): normalized_volume_identifiers = [] for volume_identifier in volume_identifiers: if isinstance(volume_identifier, int): volume_identifier = '{0:s}{1:d}'.format(prefix, volume_identifier) elif (not v...
Normalizes volume identifiers. Args: volume_system (VolumeSystem): volume system. volume_identifiers (list[int|str]): allowed volume identifiers, formatted as an integer or string with prefix. prefix (Optional[str]): volume identifier prefix. Returns: list[str]: volume identifiers with prefix. Raises: ScannerError: ...
codesearchnet
def __enter__(self) -> str: ctx = context.context() if ctx.executing_eagerly(): old_name = ctx.scope_name name = self._name if not name: scope_name = '' elif name[-1] == '/': scope_name = name elif old_name: scope_name = old_name + name...
Start the scope block. Returns: The scope name.
github-repos
def prepend_to_list(self, key, *value, pipeline=False):
    """Add new elements to the start of the list stored at key.

    Args:
        key (str): Key where the list is stored.
        value: Values to push onto the front of the list.
        pipeline (bool): True to queue the command on the transaction
            pipeline instead of executing it directly. Default False.
    """
    # Route the LPUSH through the pipeline or the live connection.
    target = self._pipeline if pipeline else self._db
    target.lpush(key, *value)
Add new element to the start of the list stored at key. Args: key (str): Key where the list is stored value: Value to add to the list pipeline (bool): True, start a transaction block. Default false.
codesearchnet
def get_cond_latents(all_latents=None, hparams=None): cond_latents = None if hparams.gen_mode == "conditional": if hparams.latent_dist_encoder in ["conv_net", "conv3d_net"]: num_cond_latents = (hparams.num_cond_latents + int(hparams.cond_first_frame)) if len(all_latents)...
Get z^{cond}_{t} given z^{1..t-1}. Args: all_latents: list of list of tensors, outer-size equals no.of time_steps-1 inner-size equals hparams.n_levels. hparams: See next_frame_glow_hparams. Returns: cond_latents: conditional latents at time-step t.
juraj-google-style
def draw_rect(self, rect):
    """Draw a rectangle on the current rendering target.

    Args:
        rect (Rect): The destination rectangle, or None to outline the
            entire rendering target.

    Raises:
        SDLError: If an error is encountered.
    """
    status = lib.SDL_RenderDrawRect(self._ptr, rect._ptr)
    check_int_err(status)
Draw a rectangle on the current rendering target. Args: rect (Rect): The destination rectangle, or None to outline the entire rendering target. Raises: SDLError: If an error is encountered.
juraj-google-style
def _weight_generator(self, reviewers): scores = [r.anomalous_score for r in reviewers] mu = np.average(scores) sigma = np.std(scores) if sigma: def w(v): try: exp = math.exp(self.alpha * (v - mu) / sigma) ...
Compute a weight function for the given reviewers. Args: reviewers: a set of reviewers to compute weight function. Returns: a function computing a weight for a reviewer.
juraj-google-style
def as_dataframe(self, pattern='*', max_rows=None): data = [] for i, group in enumerate(self.list(pattern)): if max_rows is not None and i >= max_rows: break parent = self._group_dict.get(group.parent_id) parent_display_name = '' if parent is None else parent.display_name da...
Creates a pandas dataframe from the groups that match the filters. Args: pattern: An optional pattern to further filter the groups. This can include Unix shell-style wildcards. E.g. ``"Production *"``, ``"*-backend"``. max_rows: The maximum number of groups to return. If None, return all. Returns: A pandas dataframe ...
juraj-google-style
def build_inputs_with_special_tokens(self, token_ids_0: List[int], token_ids_1: Optional[List[int]]=None) -> List[int]: if token_ids_1 is None: return [self.cls_token_id] + token_ids_0 + [self.sep_token_id] cls = [self.cls_token_id] sep = [self.sep_token_id] return cls + token_ids_0 + sep + toke...
Build model inputs from a sequence or a pair of sequence for sequence classification tasks by concatenating and adding special tokens. A BERT sequence has the following format: - single sequence: `[CLS] X [SEP]` - pair of sequences: `[CLS] A [SEP] B [SEP]` Args: token_ids_0 (`List[int]`): List of IDs to which the spe...
github-repos
def get_keypoint_predictions(heatmaps: np.ndarray) -> Tuple[np.ndarray, np.ndarray]: if not isinstance(heatmaps, np.ndarray): raise ValueError('Heatmaps should be np.ndarray') if heatmaps.ndim != 4: raise ValueError('Heatmaps should be 4-dimensional') batch_size, num_keypoints, _, width = he...
Get keypoint predictions from score maps. Args: heatmaps (`np.ndarray` of shape `(batch_size, num_keypoints, height, width)`): Model predicted heatmaps. Returns: tuple: A tuple containing aggregated results. - coords (`np.ndarray` of shape `(batch_size, num_keypoints, 2)`): Predicted keypoint location. - scores (`np...
github-repos
def parse_compounds(compound_info, case_id, variant_type): compounds = [] if compound_info: for family_info in compound_info.split(','): splitted_entry = family_info.split(':') if (splitted_entry[0] == case_id): for compound in splitted_entry[1].split('|'): ...
Get a list with compounds objects for this variant. Arguments: compound_info(str): A Variant dictionary case_id (str): unique family id variant_type(str): 'research' or 'clinical' Returns: compounds(list(dict)): A list of compounds
codesearchnet
def extractHolidayDate(self, setting_holiday): ret = namedtuple("result", ["Holiday", "Month", "Day"]) setting_holiday += 1 ret.Holiday = str(setting_holiday) if (setting_holiday < 1) or (setting_holiday > Extents.Holidays): ekm_log("Out of bounds: holiday " + str(...
Read a single holiday date from meter buffer. Args: setting_holiday (int): Holiday from 0-19 or in range(Extents.Holidays) Returns: tuple: Holiday tuple, elements are strings. =============== ====================== Holiday Holiday 0-19 as string Day Day 1-31 as string Month Monty 1-12 ...
juraj-google-style
def _ParseKeyWithPlugin(self, parser_mediator, registry_key, plugin): try: plugin.UpdateChainAndProcess(parser_mediator, registry_key) except (IOError, dfwinreg_errors.WinRegistryValueError) as exception: parser_mediator.ProduceExtractionWarning( 'in key: {0:s} error: {1!s}'.format(re...
Parses the Registry key with a specific plugin. Args: parser_mediator (ParserMediator): parser mediator. registry_key (dfwinreg.WinRegistryKey): Windows Registry key. plugin (WindowsRegistryPlugin): Windows Registry plugin.
juraj-google-style
def execute(self, command, *args, encoding=_NOTSET): if ((self._reader is None) or self._reader.at_eof()): msg = (self._close_msg or 'Connection closed or corrupted') raise ConnectionClosedError(msg) if (command is None): raise TypeError('command must not be None') if (None in args):...
Executes redis command and returns Future waiting for the answer. Raises: * TypeError if any of args can not be encoded as bytes. * ReplyError on redis '-ERR' responses. * ProtocolError when response can not be decoded meaning connection is broken. * ConnectionClosedError when either client or server has closed the co...
codesearchnet
def batch_normalize_with_arguments(x, arguments): x = prettytensor.wrap(x) if isinstance(arguments, bool): if arguments: return x.batch_normalize() else: return x kwargs = arguments._asdict() defaults = prettytensor._defaults for arg in ('learned_moments_updat...
Applies batch normalization to x as specified in arguments. Args: x: A Pretty Tensor. arguments: Either a boolean to batch_normalize or a BatchNormalizationArguments Returns: x with batch normalization applied.
codesearchnet
def get_counter_metric(result: PipelineResult, namespace: str, name: str) -> int: metrics = result.metrics().query(MetricsFilter().with_namespace(namespace).with_name(name)) counters = metrics['counters'] if len(counters) > 1: raise RuntimeError('%d instead of one metric result matches name: %s in n...
get specific counter metric from pipeline result Args: result: the PipelineResult which metrics are read from namespace: a string representing the namespace of wanted metric name: a string representing the name of the wanted metric Returns: the result of the wanted metric if it exist, else -1
github-repos
def range(*args, prefix: str):
    """Return a list of NamedQubits, one per number in the given range.

    Args:
        *args: Args to be passed to Python's standard range function.
        prefix: A prefix for the names of the resulting qubits.

    Returns:
        list: NamedQubits named ``prefix + str(i)`` for each i in range.
    """
    qubits = []
    for index in range(*args):
        qubits.append(NamedQubit(prefix + str(index)))
    return qubits
Returns a range of NamedQubits. The range returned starts with the prefix, and followed by a qubit for each number in the range, e.g.: NamedQubit.range(3, prefix="a") -> ["a1", "a2", "a3] NamedQubit.range(2, 4, prefix="a") -> ["a2", "a3] Args: *args: Args to be passed to Python's standard range function. prefix: A p...
juraj-google-style
def generate_sitemap(self, path='sitemap.xml', https=False):
    """Generate an XML sitemap and write it to a file.

    Args:
        path (str): The name of the file to write to.
        https (bool): If True, links inside the sitemap with relative
            scheme will be set to HTTPS. If False (the default), they will
            be set to plain HTTP.
    """
    xml = russell.sitemap.generate_sitemap(self, https=https)
    self.write_file(path, xml)
Generate an XML sitemap. Args: path (str): The name of the file to write to. https (bool): If True, links inside the sitemap with relative scheme (e.g. example.com/something) will be set to HTTPS. If False (the default), they will be set to plain HTTP.
juraj-google-style
def set_timestamp(cls, filename: str, response: HTTPResponse): last_modified = response.fields.get('Last-Modified') if not last_modified: return try: last_modified = email.utils.parsedate(last_modified) except ValueError: _logger.exception('...
Set the Last-Modified timestamp onto the given file. Args: filename: The path of the file response: Response
juraj-google-style
def wait(self, timeout=None): if self._future: try: self._future.exception(timeout) except concurrent.futures.TimeoutError: self._timeout() self._refresh_state() else: while not self.is_complete: if timeout is not None: if timeout <=...
Wait for the job to complete, or a timeout to happen. Args: timeout: how long to wait before giving up (in seconds); default None which means no timeout. Returns: The Job
juraj-google-style
def profile(self, num):
    """Search for a company profile by company number.

    Args:
        num (str): Company number to search on.

    Returns:
        The HTTP response for the profile lookup.
    """
    uri = self._BASE_URI + 'company/{}'.format(num)
    response = self.session.get(uri)
    # Raise early if the API answered with an error status.
    self.handle_http_error(response)
    return response
Search for company profile by company number. Args: num (str): Company number to search on.
codesearchnet
def heightmap_get_normal(
    hm: np.ndarray, x: float, y: float, waterLevel: float
) -> Tuple[float, float, float]:
    """Return the map normal at the given coordinates.

    Args:
        hm (numpy.ndarray): A numpy.ndarray formatted for heightmap
            functions.
        x (float): The x coordinate.
        y (float): The y coordinate.
        waterLevel (float): The heightmap is considered flat below this
            value.

    Returns:
        Tuple[float, float, float]: An (x, y, z) vector normal.
    """
    # Output buffer filled in-place by the C library call.
    normal = ffi.new("float[3]")
    lib.TCOD_heightmap_get_normal(
        _heightmap_cdata(hm), x, y, normal, waterLevel
    )
    return (normal[0], normal[1], normal[2])
Return the map normal at given coordinates. Args: hm (numpy.ndarray): A numpy.ndarray formatted for heightmap functions. x (float): The x coordinate. y (float): The y coordinate. waterLevel (float): The heightmap is considered flat below this value. Returns: Tuple[float, float, float]: An (x, y, z) vector normal.
juraj-google-style
def CreateSmartShoppingAdGroup(client, campaign_id): ad_group_service = client.GetService('AdGroupService', version='v201809') ad_group = {'campaignId': campaign_id, 'name': ('Smart Shopping ad group adgroup_operations = {'operator': 'ADD', 'operand': ad_group} ad_group = ad_group_service.mutate(adgrou...
Adds a new Smart Shopping ad group. Args: client: an AdWordsClient instance. campaign_id: the str ID of a Smart Shopping campaign. Returns: An ad group ID.
codesearchnet
def push_doc(self, document): msg = self._protocol.create('PUSH-DOC', document) reply = self._send_message_wait_for_reply(msg) if reply is None: raise RuntimeError("Connection to server was lost") elif reply.header['msgtype'] == 'ERROR': raise RuntimeErro...
Push a document to the server, overwriting any existing server-side doc. Args: document : (Document) A Document to push to the server Returns: The server reply
juraj-google-style
def set_cache_policy(self, func):
    """Set the context cache policy function.

    Args:
        func: A function that accepts a Key instance as argument and
            returns a bool indicating if it should be cached. May be None
            (restore the default policy) or a bool (always/never cache).
    """
    if func is None:
        func = self.default_cache_policy
    elif isinstance(func, bool):
        # Wrap the constant flag in a callable with the policy signature.
        func = lambda unused_key, flag=func: flag
    self._cache_policy = func
Set the context cache policy function. Args: func: A function that accepts a Key instance as argument and returns a bool indicating if it should be cached. May be None.
codesearchnet
def cumulative_probabilities(self):
    """Cumulative sum of the relative probabilities for all possible jumps.

    Returns:
        (np.array): Cumulative sum of relative jump probabilities,
        normalised so the final entry is 1.
    """
    total = np.sum(self.p)
    return np.cumsum(self.p) / total
Cumulative sum of the relative probabilities for all possible jumps. Args: None Returns: (np.array): Cumulative sum of relative jump probabilities.
juraj-google-style
def check_prerequisites( prerequisites, checker, msg_tmpl='Prerequisites "{}" are required in method "{}" but not ' 'found, please install them first.'): def wrap(func): @functools.wraps(func) def wrapped_func(*args, **kwargs): requirements = [prere...
A decorator factory to check if prerequisites are satisfied. Args: prerequisites (str of list[str]): Prerequisites to be checked. checker (callable): The checker method that returns True if a prerequisite is meet, False otherwise. msg_tmpl (str): The message template with two variables. Returns: decorator: A specific...
juraj-google-style
def combine(args, part=None): args = [cleanup(arg) for arg in args] if (part is not None): (parts, orders) = part if (numpy.array(orders).size == 1): orders = ([int(numpy.array(orders).item())] * len(args)) parts = numpy.array(parts).flatten() for (i, arg) in enumerat...
All linear combination of a list of list. Args: args (numpy.ndarray) : List of input arrays. Components to take linear combination of with `args[i].shape=(N[i], M[i])` where N is to be taken linear combination of and M is static. M[i] is set to 1 if missing. Returns: (numpy.array) : matrix of combinations with shap...
codesearchnet
def _ParseCString(self, page_data, string_offset): cstring_map = self._GetDataTypeMap('cstring') try: value_string = self._ReadStructureFromByteStream(page_data[string_offset:], string_offset, cstring_map) except (ValueError, errors.ParseError) as exception: raise errors.ParseError('Unable t...
Parses a C string from the page data. Args: page_data (bytes): page data. string_offset (int): offset of the string relative to the start of the page. Returns: str: string. Raises: ParseError: when the string cannot be parsed.
codesearchnet
def label_search(self, label: str) -> List[dict]:
    """Return the rows in InterLex associated with that label.

    Note:
        Presumed to have duplicate labels in InterLex.

    Args:
        label: label of the entity you want to find.

    Returns:
        None if no rows match, otherwise List[dict] of matching rows.
    """
    rows = self.label2rows(self.local_degrade(label))
    return rows if rows else None
Returns the rows in InterLex associated with that label Note: Presumed to have duplicate labels in InterLex Args: label: label of the entity you want to find Returns: None or List[dict]
juraj-google-style
def _overload_operator(cls, tensor_class, operator):
    """Overload an operator with the same implementation as a base Tensor class.

    We pull the operator out of the class dynamically to avoid ordering
    issues.

    Args:
        tensor_class: The (Composite)Tensor to get the method from.
        operator: string. The operator name.
    """
    method = getattr(tensor_class, operator)
    # Unwrap a bound/class method to its plain function when present so it
    # can be re-attached to this class.
    method = getattr(method, '__func__', method)
    setattr(cls, operator, method)
Overload an operator with the same implementation as a base Tensor class. We pull the operator out of the class dynamically to avoid ordering issues. Args: tensor_class: The (Composite)Tensor to get the method from. operator: string. The operator name.
github-repos
def read(self, size=None): if not self._is_open: raise IOError('Not opened.') if self._current_offset < 0: raise IOError('Invalid current offset value less than zero.') if self._current_offset >= self._size: return b'' if size is None or self._current_offset + size > ...
Reads a byte string from the file-like object at the current offset. The function will read a byte string of the specified size or all of the remaining data if no size was specified. Args: size (Optional[int]): number of bytes to read, where None is all remaining data. Returns: bytes: data read. Raises: IOError: if...
juraj-google-style
def insert(self, optional_root_locations_path): encountered_simple_optional = False parent_location = self._root_location for optional_root_location in optional_root_locations_path: if encountered_simple_optional: raise AssertionError(u'Encountered simple opt...
Insert a path of optional Locations into the tree. Each OptionalTraversalTree object contains child Location objects as keys mapping to other OptionalTraversalTree objects. Args: optional_root_locations_path: list of optional root Locations all except the last of which must be present in complex_optional_roots
juraj-google-style
def promote_artifacts(self, promote_stage='latest'):
    """Promote artifact version to its destination S3 URI.

    Only the 'alpha' stage targets the canary URI; every other stage
    (including 'canary') syncs to the latest URI, exactly as before.

    Args:
        promote_stage (string): Stage that is being promoted.
    """
    stage = promote_stage.lower()
    if stage == 'alpha':
        self._sync_to_uri(self.s3_canary_uri)
    else:
        self._sync_to_uri(self.s3_latest_uri)
Promote artifact version to dest. Args: promote_stage (string): Stage that is being promoted
codesearchnet
def read_int8(self, little_endian=True):
    """Read 1 byte as a signed integer value from the stream.

    Args:
        little_endian (bool): specify the endianness. (Default) Little endian.

    Returns:
        int:
    """
    endian = "<" if little_endian else ">"
    return self.unpack(endian + "b")
Read 1 byte as a signed integer value from the stream. Args: little_endian (bool): specify the endianness. (Default) Little endian. Returns: int:
juraj-google-style
def _merge_tensor_signatures(self, signatures): sorted_update = [] if self._num_signature_dimensions() > 1: signature_indices = self._signature_types() for _, val in sorted(signatures.items(), key=lambda item: signature_indices[item[0]]): sorted_update.append(val) updates = a...
Returns a tensor that merges the given signatures. Args: signatures: A dictionary of the signature updates from signature name to a tensor of dimension [1]. Returns: A tensor that concats the signature values in a predefined order. Raises: ValueError: Unable to merge signatures.
github-repos
def _ReadEncryptedData(self, read_size): encrypted_data = self._file_object.read(read_size) read_count = len(encrypted_data) self._encrypted_data = b''.join([self._encrypted_data, encrypted_data]) self._decrypted_data, self._encrypted_data = ( self._decrypter.Decrypt(self._encrypted_data...
Reads encrypted data from the file-like object. Args: read_size (int): number of bytes of encrypted data to read. Returns: int: number of bytes of encrypted data read.
juraj-google-style
def on_predict_batch_end(self, batch, logs=None):
Called at the end of a batch in `predict` methods. Subclasses should override for any actions to run. Note that if the `steps_per_execution` argument to `compile` in `tf.keras.Model` is set to `N`, this method will only be called every `N` batches. Args: batch: Integer, index of batch within the current epoch. logs:...
github-repos
def merge_two_dictionaries(a, b, merge_lists=False): key = None try: if ((a is None) or isinstance(a, (six.string_types, six.text_type, six.integer_types, float))): a = b elif isinstance(a, list): if isinstance(b, list): if merge_lists: ...
Merges b into a and returns merged result NOTE: tuples and arbitrary objects are not handled as it is totally ambiguous what should happen Args: a (DictUpperBound): dictionary to merge into b (DictUpperBound): dictionary to merge from merge_lists (bool): Whether to merge lists (True) or replace lists (False). Default...
codesearchnet
def nb_ll_row(params, data_row):
    """Return the negative log-likelihood of a single row under NB(p, r).

    Args:
        params (array) - [p, r]
        data_row (array) - 1d array of data

    Returns:
        negative LL of row
    """
    p, r = params[0], params[1]
    n = len(data_row)
    log_likelihood = (np.sum(gammaln(data_row + r))
                      - np.sum(gammaln(data_row + 1))
                      - n * gammaln(r)
                      + np.sum(data_row) * np.log(p)
                      + n * r * np.log(1 - p))
    return -log_likelihood
returns the negative LL of a single row. Args: params (array) - [p, r] data_row (array) - 1d array of data Returns: LL of row
juraj-google-style
def __init__(self, submission_id, submissions, storage_bucket): super(DefenseSubmission, self).__init__(submission_id, submissions, storage_bucket) if self.type != TYPE_DEFENSE: raise WorkerError('Incorrect defense type for submission "{0}"'.format( ...
Initializes DefenseSubmission. Args: submission_id: ID of the submission submissions: instance of CompetitionSubmissions with all submissions storage_bucket: storage bucket where all submissions are stored Raises: WorkerError: if submission has incorrect type
juraj-google-style
def __init__(self, feed_fn):
    """Initializes a `FeedFnHook`.

    Args:
        feed_fn: function that takes no arguments and returns `dict` of
            `Tensor` to feed.
    """
    self.feed_fn = feed_fn
Initializes a `FeedFnHook`. Args: feed_fn: function that takes no arguments and returns `dict` of `Tensor` to feed.
github-repos
def check_denotation(target_values, predicted_values):
    """Return True if the predicted denotation is correct.

    Correct means the lists have equal length and every target value is
    matched by at least one predicted value.

    Args:
        target_values (list[Value])
        predicted_values (list[Value])

    Returns:
        bool
    """
    if len(target_values) != len(predicted_values):
        return False
    return all(
        any(target.match(pred) for pred in predicted_values)
        for target in target_values)
Return True if the predicted denotation is correct. Args: target_values (list[Value]) predicted_values (list[Value]) Returns: bool
codesearchnet
def ParseContactRow(self, parser_mediator, query, row, **unused_kwargs): query_hash = hash(query) event_data = TwitterIOSContactEventData() event_data.description = self._GetRowValue(query_hash, row, 'description') event_data.followers_count = self._GetRowValue( query_hash, row, 'followers...
Parses a contact row from the database. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. query (str): query that created the row. row (sqlite3.Row): row resulting from query.
juraj-google-style
def _send_json(self, method, path, data): headers = {'Content-type': 'application/json'} return self._make_request(method, path, data=data, headers=headers)
Make a application/json request. Args: `method`: The method of the request (POST or PUT). `path`: The path to the resource. `data`: The JSON-encoded data. Returns: The content of the response. Raises: An exception depending on the HTTP status code of the response.
juraj-google-style
def __init__(self, name, client=None):
    """Create a Thing.

    Args:
        name (str): name of the Thing. This corresponds to the AWS IoT
            Thing name.
        client: MQTT client connection to use. This can be set anytime
            before publishing Thing messages to the server.
    """
    self._name = name
    self.client = client
    # No state known yet; presumably populated on first report/update —
    # TODO(review): confirm against the rest of the class.
    self._state = None
Create a Thing. Args: name (str): name of the Thing. This corresponds to the AWS IoT Thing name. client (str): MQTT client connection to use. This can be set anytime before publishing Thing messages to the server.
juraj-google-style
def _apply(self, ctx: ExtensionContext) -> AugmentedDict: node_key, node_value = ctx.node def process(pattern: Pattern[str], _str: str) -> str: _match = pattern.match(_str) if _match is None: return _str placeholder,...
Replaces any {{env::*}} directives with it's actual environment variable value or a default. Args: ctx: The processing context. Returns: Returns the altered node key and value.
juraj-google-style
def FetchAllGraphSeries(label, report_type, period=None, token=None): if _ShouldUseLegacyDatastore(): return _FetchAllGraphSeriesFromTheLegacyDB(label, report_type, period=period, token=token) if (period is None): time_range = None else: range_end = rdfvalue.RDFDatetime.Now() ...
Fetches graph series for the given label and report-type from the DB. Args: label: Client label to fetch data for. report_type: rdf_stats.ClientGraphSeries.ReportType to fetch data for. period: rdfvalue.Duration specifying how far back in time to fetch data. If not provided, all data for the given label and report-typ...
codesearchnet
def get(self, name, *default): curr = self.values for part in name.split('.'): if (part in curr): curr = curr[part] elif default: return default[0] else: fmt = "Context value '{}' does not exist:\n{}" raise AttributeError(fmt.format(name, u...
Get context value with the given name and optional default. Args: name (str): The name of the context value. *default (Any): If given and the key doesn't not exist, this will be returned instead. If it's not given and the context value does not exist, `AttributeError` will be raised Returns: The requested context val...
codesearchnet
def is_text(self):
    """Tells if this message is a text message.

    Returns:
        bool. Success
    """
    text_types = (self._TYPE_PASTE, self._TYPE_TEXT, self._TYPE_TWEET)
    return self.type in text_types
Tells if this message is a text message. Returns: bool. Success
codesearchnet
def parse_step(step_name):
    """Normalize a metric step name.

    Lower-cases the name, replaces spaces with underscores, removes a
    leading 'step' label, and strips surrounding ':' / '_' characters.

    Args:
        step_name(str): step name passed in metric ParDo

    Returns:
        lower case step name without namespace and step label
    """
    normalized = step_name.lower().replace(' ', '_')
    if normalized.startswith('step'):
        normalized = normalized[len('step'):]
    return normalized.strip(':_')
Replaces white spaces and removes 'Step:' label Args: step_name(str): step name passed in metric ParDo Returns: lower case step name without namespace and step label
github-repos
def publish(cls, message, client_filter=None):
    """Publish a message to subscribers while holding the class lock.

    Args:
        message: The message to publish.
        client_filter: A filter function to call passing in each client.
            Only clients for whom the function returns True will have the
            message sent to them.
    """
    with cls._lock:
        for subscriber in cls.subscribers:
            # Short-circuit: the filter is only consulted when provided.
            should_send = not client_filter or client_filter(subscriber)
            if should_send:
                subscriber.send(message)
Publish messages to subscribers. Args: message: The message to publish. client_filter: A filter function to call passing in each client. Only clients for whom the function returns True will have the message sent to them.
codesearchnet
def teardown(self, *args, **kwargs):
    """Called to clean up an instance before it is discarded.

    Default implementation is a no-op; subclasses override as needed.

    Args:
        *args: Additional arguments and side inputs.
        **kwargs: Additional arguments and side inputs.
    """
    pass
Called to clean up an instance before it is discarded. If you are using Dataflow, you need to enable Dataflow Runner V2 before using this feature. Args: *args: Additional arguments and side inputs. **kwargs: Additional arguments and side inputs.
github-repos
def index_add(x, idx, y):
    """Pure equivalent of `x[idx] += y`.

    Returns the value x would have after the NumPy-style indexed
    assignment `x[idx] += y`; because it is a pure function, `x` itself
    is not changed.

    Args:
        x: an array with the values to be updated.
        idx: a NumPy-style index.
        y: the update values.

    Returns:
        The updated version of x.
    """
    return _index_update_helper(tf_np.ndarray._with_index_add, x, idx, y)
Pure equivalent of `x[idx] += y`. Returns the value of x that would result from the NumPy-style indexed assignment `x[idx] += y`. Because it's a pure function, `x` itself won't be changed. Args: x: an array with the values to be updated. idx: a Numpy-style index, consisting of `None`, integers, slice objects, ellipse...
github-repos
def guess_depth(packages):
    """Guess the optimal depth to use for the given list of packages.

    Args:
        packages (list of str): list of packages.

    Returns:
        int: guessed depth to use.
    """
    dot_counts = [name.count('.') for name in packages]
    if len(dot_counts) == 1:
        return dot_counts[0] + 2
    return min(dot_counts) + 1
Guess the optimal depth to use for the given list of arguments. Args: packages (list of str): list of packages. Returns: int: guessed depth to use.
juraj-google-style
def _separate_string(string: str, stride: int, separator: str) -> str: result = '' for i, c in enumerate(string): if i > 0 and i % stride == 0: result += separator result += c return result
Returns a separated string by separator at multiples of stride. For example, the input: * string: 'thequickbrownfoxjumpedoverthelazydog' * stride: 3 * separator: '-' Would produce a return value of: 'the-qui-ckb-row-nfo-xju-mpe-dov-ert-hel-azy-dog' Args: string: The string to split. stride: The interval to insert th...
github-repos
def generate_string(self, initial_logits, initial_state, sequence_length): current_logits = initial_logits current_state = initial_state generated_letters = [] for _ in range(sequence_length): char_index = tf.squeeze(tf.multinomial(current_logits, 1)) char_one_hot = tf.one_hot(char_index...
Builds sub-graph to generate a string, sampled from the model. Args: initial_logits: Starting logits to sample from. initial_state: Starting state for the RNN core. sequence_length: Number of characters to sample. Returns: A Tensor of characters, with dimensions `[sequence_length, batch_size, output_size]`.
codesearchnet
def from_file(cls, path):
    """Create a text from a file.

    Undecodable bytes are replaced rather than raising.

    Args:
        path (str): The file path.
    """
    with open(path, 'r', errors='replace') as handle:
        contents = handle.read()
    return cls(contents)
Create a text from a file. Args: path (str): The file path.
juraj-google-style
def _AddPropertiesForNonRepeatedScalarField(field, cls): proto_field_name = field.name property_name = _PropertyName(proto_field_name) type_checker = type_checkers.GetTypeChecker(field) default_value = field.default_value valid_values = set() is_proto3 = (field.containing_type.syntax == 'proto3'...
Adds a public property for a nonrepeated, scalar protocol message field. Clients can use this property to get and directly set the value of the field. Note that when the client sets the value of a field by using this property, all necessary "has" bits are set as a side-effect, and we also perform type-checking. Args: ...
codesearchnet
def __init__(self, function_name, level=1, children_inputs_mappings=None, **kwargs): self._function_name = function_name self._level = level if self._level == 1: assert children_inputs_mappings is None else: assert isinstance(children_inputs_mappings, dict) self._children_inputs_mapp...
Create a OpHint. Args: function_name: Name of the function (the custom op name in tflite) level: OpHint level. children_inputs_mappings: Children OpHint inputs/outputs mapping. children_inputs_mappings should like below: "parent_first_child_input": [{"parent_input_index": num, "child_input_index": num}, ...] "parent_l...
github-repos
def exit_hook(callable, once=True):
    r"""Register *callable* to run while ec exits.

    Args:
        callable (callable): The target callable.
        once (bool): Avoids adding a func to the hooks, if it has been
            added already. Defaults to True.

    Note:
        Hooks are processed in a LIFO order.
    """
    # Membership is only checked when deduplication is requested.
    duplicate = once and callable in ExitHooks
    if not duplicate:
        ExitHooks.append(callable)
r"""A decorator that makes the decorated function run while ec exits. Args: callable (callable): The target callable. once (bool): Avoids adding a func to the hooks, if it has been added already. Defaults to True. Note: Hooks are processed in a LIFO order.
codesearchnet
def play_alert(zones, alert_uri, alert_volume=20, alert_duration=0, fade_back=False): for zone in zones: zone.snap = Snapshot(zone) zone.snap.snapshot() print('snapshot of zone: {}'.format(zone.player_name)) for zone in zones: if zone.is_coordinator: ...
Demo function using soco.snapshot across multiple Sonos players. Args: zones (set): a set of SoCo objects alert_uri (str): uri that Sonos can play as an alert alert_volume (int): volume level for playing alert (0 to 100) alert_duration (int): length of alert (if zero then length of track) fade_back (bool): on reinstat...
juraj-google-style
def add_ast_fn(d, spec, parent_function=None): if (d['type'] == 'Function'): ast_fn = Function(d['function']['name'], spec, parent_function=parent_function) for arg in d['args']: if (arg['type'] == 'Function'): ast_fn.add_argument(add_ast_fn(arg, spec, parent_function=ast...
Convert dict AST to object AST Function Args: ast_fn: AST object Function d: AST as dictionary spec: BEL Specification Return: ast_fn
codesearchnet
def post_process(self, outputs, target_sizes): logging.warning_once('`post_process` is deprecated and will be removed in v5 of Transformers, please use `post_process_object_detection` instead, with `threshold=0.` for equivalent results.') out_logits, out_bbox = (outputs.logits, outputs.pred_boxes) if len(ou...
Converts the output of [`ConditionalDetrForObjectDetection`] into the format expected by the Pascal VOC format (xmin, ymin, xmax, ymax). Only supports PyTorch. Args: outputs ([`ConditionalDetrObjectDetectionOutput`]): Raw outputs of the model. target_sizes (`torch.Tensor` of shape `(batch_size, 2)`): Tensor containing...
github-repos
def validate(self, scope: ValidationScope = ValidationScope.all, ctype: ContentType = ContentType.config) -> None:
    """Validate the receiver's value by delegating to its schema node.

    Args:
        scope: Scope of the validation (syntax, semantics or all).
        ctype: Receiver's content type.

    Raises:
        SchemaError: If the value doesn't conform to the schema.
        SemanticError: If the value violates a semantic constraint.
        YangTypeError: If the value is a scalar of incorrect type.
    """
    self.schema_node._validate(self, scope, ctype)
Validate the receiver's value. Args: scope: Scope of the validation (syntax, semantics or all). ctype: Receiver's content type. Raises: SchemaError: If the value doesn't conform to the schema. SemanticError: If the value violates a semantic constraint. YangTypeError: If the value is a scalar of incorrect type.
juraj-google-style
def get_block(self, block_id): block = backend.query.get_block(self.connection, block_id) latest_block = self.get_latest_block() latest_block_height = latest_block['height'] if latest_block else 0 if not block and block_id > latest_block_height: return res...
Get the block with the specified `block_id`. Returns the block corresponding to `block_id` or None if no match is found. Args: block_id (int): block id of the block to get.
juraj-google-style
def predict_proba(self, x, batch_size=32, verbose=0): warnings.warn('`model.predict_proba()` is deprecated and will be removed after 2021-01-01. Please use `model.predict()` instead.') preds = self.predict(x, batch_size, verbose) if preds.min() < 0.0 or preds.max() > 1.0: logging.warning('Network re...
Generates class probability predictions for the input samples. The input samples are processed batch by batch. Args: x: input data, as a Numpy array or list of Numpy arrays (if the model has multiple inputs). batch_size: integer. verbose: verbosity mode, 0 or 1. Returns: A Numpy array of probability predictions.
github-repos