code
stringlengths
20
4.93k
docstring
stringlengths
33
1.27k
source
stringclasses
3 values
def remove(self, force=False):
    """Remove the plugin from the server.

    Args:
        force (bool): Remove even if the plugin is enabled. Default: False

    Raises:
        :py:class:`docker.errors.APIError`
            If the server returns an error.
    """
    api_client = self.client.api
    return api_client.remove_plugin(self.name, force=force)
Remove the plugin from the server. Args: force (bool): Remove even if the plugin is enabled. Default: False Raises: :py:class:`docker.errors.APIError` If the server returns an error.
codesearchnet
def mtf_range(mesh, dim, dtype, name=None): dim = convert_to_dimension(dim) with tf.variable_scope(name, default_name='range'): if (dtype == tf.bfloat16): tf_range = tf.cast(tf.range(dim.size), tf.bfloat16) else: tf_range = tf.range(dim.size, dtype=dtype) return i...
Create a 1d mesh tensor with a range from [0, dim.size). Call externally as mtf.range() Args: mesh: a Mesh dim: a Dimension dtype: a tf.DType name: an optional string Returns: a Tensor
codesearchnet
def action(elem, doc): if isinstance(elem, pf.CodeBlock): doc.listings_counter += 1 elems = ([elem] if ('hide' not in elem.classes) else []) if ('file' in elem.attributes): elem.text = read_file(elem.attributes['file']) filename = trimpath(elem.attributes) ...
Processes pf.CodeBlocks. For details and a specification of how each command should behave, check the example files (especially the md and pdf)! Args: elem: The element to process. doc: The document. Returns: A changed element or None.
codesearchnet
def enable_eager_execution_internal(config=None, device_policy=None, execution_mode=None, server_def=None) -> None: if config is not None and (not isinstance(config, config_pb2.ConfigProto)): raise TypeError('config must be a tf.ConfigProto, but got %s' % type(config)) if device_policy not in (None, con...
Enables eager execution for the lifetime of this program. Most of the doc string for enable_eager_execution is relevant here as well. Args: config: See enable_eager_execution doc string device_policy: See enable_eager_execution doc string execution_mode: See enable_eager_execution doc string server_def: (Optional.) A...
github-repos
def squid_to_guid(squid): squid_pattern = re.compile(r'^(\w{8})(\w{4})(\w{4})(\w\w)(\w\w)(\w\w)(\w\w)(\w\w)(\w\w)(\w\w)(\w\w)$') squid_match = squid_pattern.match(squid) guid = '' if squid_match is not None: guid = '{' + \ squid_match.group(1)[::-1]+'-' + \ squ...
Converts a compressed GUID (SQUID) back into a GUID Args: squid (str): A valid compressed GUID Returns: str: A valid GUID
juraj-google-style
def xor_bytes(a, b):
    """XOR on two bytes objects.

    Args:
        a (bytes): object 1
        b (bytes): object 2

    Returns:
        bytes: The XOR result
    """
    assert isinstance(a, bytes)
    assert isinstance(b, bytes)
    assert len(a) == len(b)
    # Pairwise XOR; zip is safe because the lengths were checked above.
    return bytes(x ^ y for x, y in zip(a, b))
XOR on two bytes objects Args: a (bytes): object 1 b (bytes): object 2 Returns: bytes: The XOR result
juraj-google-style
def generate_masks_with_special_tokens_and_transfer_map(input_ids: torch.LongTensor) -> Tuple[Tensor, Tensor]: batch_size, num_token = input_ids.shape special_tokens_mask = torch.zeros((batch_size, num_token), device=input_ids.device).bool() for special_token in SPECIAL_TOKENS: special_tokens_mask |...
Generate attention mask between each pair of special tokens and positional ids. Args: input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): Indices of input sequence tokens in the vocabulary. Returns: `tuple(torch.Tensor)` comprising attention mask between each special tokens and position_ids: - **at...
github-repos
def IsOutOfLineMethodDefinition(clean_lines, linenum):
    """Check if current line contains an out-of-line method definition.

    Args:
      clean_lines: A CleansedLines instance containing the file.
      linenum: The number of the line to check.

    Returns:
      True if current line contains an out-of-line method definition.
    """
    # Scan backwards (at most 10 lines) for the start of the current
    # declaration.  Fixed: `xrange` is Python-2-only; `range` works on both.
    for i in range(linenum, max(-1, linenum - 10), -1):
        if Match(r'^([^()]*\w+)\(', clean_lines.elided[i]):
            # Found the declaration start; it is out-of-line iff the name is
            # qualified with `Class::`.
            return Match(r'^[^()]*\w+::\w+\(', clean_lines.elided[i]) is not None
    return False
Check if current line contains an out-of-line method definition. Args: clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. Returns: True if current line contains an out-of-line method definition.
codesearchnet
def adafactor_decay_rate_adam(beta2):
    """Second-moment decay rate like Adam, subsuming the correction factor.

    Args:
        beta2: a float between 0 and 1

    Returns:
        a scalar
    """
    step = tf.to_float(tf.train.get_or_create_global_step()) + 1.0
    decay = beta2 * (1.0 - tf.pow(beta2, step - 1.0)) / (1.0 - tf.pow(beta2, step))
    return decay
Second-moment decay rate like Adam, subsuming the correction factor. Args: beta2: a float between 0 and 1 Returns: a scalar
juraj-google-style
def l2_regularizer(weight=1.0, scope=None): def regularizer(tensor): with tf.name_scope(scope, 'L2Regularizer', [tensor]): l2_weight = tf.convert_to_tensor(weight, dtype=tensor.dtype.base_dtype, name='weight') return tf.m...
Define a L2 regularizer. Args: weight: scale the loss by this factor. scope: Optional scope for name_scope. Returns: a regularizer function.
juraj-google-style
def ExpectedEnginesToBuild(self, run_params):
    """Checks that the expected engine is built.

    Args:
        run_params: the run parameters.

    Returns:
        the expected engines to build.  There shall be engines generated for
        each maximum batch size.
    """
    num_engines = len(self.max_batch_sizes)
    return ['TRTEngineOp_%03d' % idx for idx in range(num_engines)]
Checks that the expected engine is built. Args: run_params: the run parameters. Returns: the expected engines to build. There shall be engines generated for each maximum batch size.
github-repos
def start_instance(self):
    """Start the instance for this Streaming Analytics service.

    Returns:
        dict: JSON response for the instance start operation.
    """
    url = self._get_url('start_path')
    response = self.rest_client.session.put(url, json={})
    _handle_http_errors(response)
    return response.json()
Start the instance for this Streaming Analytics service. Returns: dict: JSON response for the instance start operation.
codesearchnet
def add_output(self, name, value):
    """Simple helper for adding outputs.

    Args:
        name (str): The name of the output to create.
        value (str): The value to put in the output.
    """
    output = Output(name, Value=value)
    self.template.add_output(output)
Simple helper for adding outputs. Args: name (str): The name of the output to create. value (str): The value to put in the output.
codesearchnet
def _callback_main(self, call, handler='edit_config', target='running', source='startup'): try: if (handler == 'get_config'): call = ET.tostring(call.getchildren()[0]) return self._mgr.get(filter=('subtree', call)) call = ET.tostring(call) if (handler == 'get'): ...
Callback for NETCONF calls. Args: call: An Element Tree element containing the XML of the NETCONF call you intend to make to the device. handler: Type of ncclient call to make. get_config: NETCONF standard get config. get: ncclient dispatch. For custom RPCs. edit_config: NETCONF standard edit. delete_config: NETCONF s...
codesearchnet
def __init__(self, structure, defect_site, charge=0.):
    """Initializes an abstract defect.

    Args:
        structure: Pymatgen Structure without any defects.
        defect_site (Site): site for defect within structure;
            must have same lattice as structure.
        charge: (int or float) defect charge; default is zero.

    Raises:
        ValueError: If `defect_site` lattice differs from `structure` lattice.
    """
    # Validate before assigning any state, so a failed construction does not
    # leave partially-initialized attributes behind.
    if structure.lattice != defect_site.lattice:
        raise ValueError("defect_site lattice must be same as structure lattice.")
    self._structure = structure
    self._charge = charge
    self._defect_site = defect_site
Initializes an abstract defect Args: structure: Pymatgen Structure without any defects defect_site (Site): site for defect within structure must have same lattice as structure charge: (int or float) defect charge default is zero, meaning no change to NELECT after defect is created in the structure (assuming use_struct...
juraj-google-style
def dbmin50years(self, value=None): if value is not None: try: value = float(value) except ValueError: raise ValueError('value {} need to be of type float ' 'for field `dbmin50years`'.format(value)) self._...
Corresponds to IDD Field `dbmin50years` 50-year return period values for minimum extreme dry-bulb temperature Args: value (float): value for IDD Field `dbmin50years` Unit: C if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a...
juraj-google-style
def watch(self, path, recursive=False): self._logger.info('Initializing watcher for path "%s"', path) handler = FileHandler(self) self._observer = Observer() self._observer.schedule(handler, path, recursive) self._logger.info('Starting watcher') self._observer....
Watch for files in a directory and apply normalizations. Watch for new or changed files in a directory and apply normalizations over them. Args: path: Path to the directory. recursive: Whether to find files recursively or not.
juraj-google-style
def cancelMktDepth(self, contract: Contract, isSmartDepth=False): ticker = self.ticker(contract) reqId = self.wrapper.endTicker(ticker, 'mktDepth') if reqId: self.client.cancelMktDepth(reqId, isSmartDepth) else: self._logger.error(f'cancelMktDepth: No reqId found for contract {contract}'...
Unsubscribe from market depth data. Args: contract: The exact contract object that was used to subscribe with.
codesearchnet
def _nested_from_proto(nested_proto, process_leafs): if (not isinstance(nested_proto, module_pb2.NestedData)): raise base_errors.ModuleInfoError('Expected module_pb2.NestedData.') if nested_proto.HasField('value'): value = nested_proto.value if (not value): value = _Unseriali...
Deserializes `nested_proto`. Args: nested_proto: An instance of `module_pb2.NestedData`. process_leafs: A function to be applied to the leaf values of the nested structure. Returns: An instance of `string`, `tuple`, `dict` or `namedtuple`. Raises: base_errors.ModuleInfoError: If the probobuf is of the wrong type or ...
codesearchnet
def tensor_layout(self, tensor_shape, mesh_shape): ret = [self.tensor_dimension_to_mesh_axis(d, mesh_shape) for d in tensor_shape] not_nones = [a for a in ret if a is not None] if len(not_nones) != len(set(not_nones)): raise ValueError( "Two Tensor Dimensions may not map to t...
Computes TensorLayout given a Tensor Shape and a Mesh Shape. Args: tensor_shape: Shape. mesh_shape: Shape. Returns: TensorLayout. Raises: ValueError: If two Tensor Dimensions map to the same Mesh Dimensions.
juraj-google-style
def fresh(t, non_generic): mappings = {} def freshrec(tp): p = prune(tp) if isinstance(p, TypeVariable): if is_generic(p, non_generic): if p not in mappings: mappings[p] = TypeVariable() return mappings[p] else:...
Makes a copy of a type expression. The type t is copied. The generic variables are duplicated and the non_generic variables are shared. Args: t: A type to be copied. non_generic: A set of non-generic TypeVariables
juraj-google-style
def list_current_jobs(self):
    """Return a list of the currently scheduled jobs in APScheduler

    Returns:
        `dict` of `str`: :obj:`apscheduler/job:Job`
    """
    # Internal bookkeeping jobs are excluded from the listing.
    excluded = ('schedule_jobs', 'process_status_queue')
    return {
        job.name: job
        for job in self.scheduler.get_jobs()
        if job.name not in excluded
    }
Return a list of the currently scheduled jobs in APScheduler Returns: `dict` of `str`: :obj:`apscheduler/job:Job`
codesearchnet
def Detect(self, str_in): components = SplitIntoComponents(str_in) extracted_paths = set() for extractor in self.extractors: extracted_paths.update(extractor.Extract(components)) results = set(extracted_paths) for post_processor in self.post_processors: processed_results = set() ...
Detects paths in a given string. Args: str_in: String where the paths should be detected. Returns: A list of paths (as strings) detected inside the given string.
codesearchnet
def isInstalled(value): function = command = .format(f = function, arg=value) cmd = CommandHelper(command) cmd.execute() return "1" in cmd.output
Check if a software is installed into machine. Args: value (str): Software's name Returns: bool: True if the software is installed. False else
juraj-google-style
def stream_reader_statements(stream_arn): action_type = get_stream_action_type(stream_arn) arn_parts = stream_arn.split('/') wildcard_arn_parts = arn_parts[:(- 1)] wildcard_arn_parts.append('*') wildcard_arn = '/'.join(wildcard_arn_parts) return [Statement(Effect=Allow, Resource=[stream_arn], Ac...
Returns statements to allow Lambda to read from a stream. Handles both DynamoDB & Kinesis streams. Automatically figures out the type of stream, and provides the correct actions from the supplied Arn. Arg: stream_arn (str): A kinesis or dynamodb stream arn. Returns: list: A list of statements.
codesearchnet
def fastcc_consistent_subset(model, epsilon, solver):
    """Return consistent subset of model.

    The largest consistent subset is returned as a set of reaction names.

    Args:
        model: :class:`MetabolicModel` to solve.
        epsilon: Flux threshold value.
        solver: LP solver instance to use.

    Returns:
        Set of reaction IDs in the consistent reaction subset.
    """
    all_reactions = set(model.reactions)
    flagged = fastcc(model, epsilon, solver)
    return all_reactions.difference(flagged)
Return consistent subset of model. The largest consistent subset is returned as a set of reaction names. Args: model: :class:`MetabolicModel` to solve. epsilon: Flux threshold value. solver: LP solver instance to use. Returns: Set of reaction IDs in the consistent reaction subset.
juraj-google-style
def run_validate_program_main(self, program_main): program_language = self.profile.get('install_json').get('programLanguage', 'python').lower() if ((program_language == 'python') and (not os.path.isfile('{}.py'.format(program_main)))): print('{}{}Could not find program main file ({}).'.format(c.Style.BR...
Validate the program main file exists. Args: program_main (str): The executable name.
codesearchnet
def get_ss_class(pdb_file, dssp_file, chain): prag = pr.parsePDB(pdb_file) pr.parseDSSP(dssp_file, prag) (alpha, threeTen, beta) = get_dssp_ss_content_multiplechains(prag, chain) if ((alpha == 0) and (beta > 0)): classification = 'all-beta' elif ((beta == 0) and (alpha > 0)): classif...
Define the secondary structure class of a PDB file at the specific chain Args: pdb_file: dssp_file: chain: Returns:
codesearchnet
def retry_auth_check(exception):
    """Specific check for auth error codes.

    Return True if we should retry. False otherwise.

    Args:
        exception: An exception to test for transience.

    Returns:
        True if we should retry. False otherwise.
    """
    # Guard clauses: only HttpErrors with an auth-related status are retried.
    if not isinstance(exception, apiclient.errors.HttpError):
        return False
    if exception.resp.status not in HTTP_AUTH_ERROR_CODES:
        return False
    _print_error('Retrying...')
    return True
Specific check for auth error codes. Return True if we should retry. False otherwise. Args: exception: An exception to test for transience. Returns: True if we should retry. False otherwise.
juraj-google-style
def get_vcenter(self, **kwargs): config = ET.Element('config') urn = 'urn:brocade.com:mgmt:brocade-vswitch' ET.SubElement(config, 'vcenter', xmlns=urn) output = self._callback(config, handler='get_config') result = [] element = ET.fromstring(str(output)) for vcenter in element.iter(('{%s}vce...
Get vCenter hosts on the switch Args: callback (function): A function executed upon completion of the method. Returns: Returns a list of vcenters Raises: None
codesearchnet
def get(self, *index):
    """Get the instance with the specified index.

    Returns:
        The corresponding instance.
    """
    assert self.wrapFunction is not None
    # A single tuple/list argument is treated as the full index.
    if len(index) == 1 and isinstance(index[0], (tuple, list)):
        index = index[0]
    if not index:
        return self.wrapFunction(self._impl.get())
    return self.wrapFunction(self._impl.get(Tuple(index)._impl))
Get the instance with the specified index. Returns: The corresponding instance.
codesearchnet
def clear(self, size=-1, *, offset=0, chunk=None) -> None:
    """Clear the content.

    Args:
        size (int): The size. Value ``-1`` means all.

    Keyword Args:
        offset (int): The offset.
        chunk (bytes): The chunk to use repeatedly.
    """
    # Thin wrapper: delegates straight to the underlying native object.
    # NOTE(review): presumably zero-fills (or chunk-fills) the buffer region —
    # confirm against the mglo implementation.
    self.mglo.clear(size, offset, chunk)
Clear the content. Args: size (int): The size. Value ``-1`` means all. Keyword Args: offset (int): The offset. chunk (bytes): The chunk to use repeatedly.
juraj-google-style
def MultiDestroyFlowStates(self, session_ids, request_limit=None): subjects = [session_id.Add("state") for session_id in session_ids] to_delete = [] deleted_requests = [] for subject, values in self.MultiResolvePrefix( subjects, self.FLOW_REQUEST_PREFIX, limit=request_limit): for _,...
Deletes all requests and responses for the given flows. Args: session_ids: A lists of flows to destroy. request_limit: A limit on the number of requests to delete. Returns: A list of requests that were deleted.
juraj-google-style
def get_template_list(self, page=1, page_size=None, account_id=None, query=None):
    """Lists your Templates

    Args:
        page (int, optional): Page number of the template List to return.
            Defaults to 1.
        page_size (int, optional): Number of objects to be returned per page,
            must be between 1 and 100, default is 20.
        account_id (str, optional): Which account to return Templates for.
        query (str, optional): Search filter.
    """
    parameters = {
        'page': page,
        'page_size': page_size,
        'account_id': account_id,
        'query': query,
    }
    request = self._get_request()
    return request.get(self.TEMPLATE_GET_LIST_URL, parameters=parameters)
Lists your Templates Args: page (int, optional): Page number of the template List to return. Defaults to 1. page_size (int, optional): Number of objects to be returned per page, must be between 1 and 100, default is 20. account_id (str, optional): Which account to return Templates for. Must be a te...
codesearchnet
def save(self, file_prefix: str, options: Optional[checkpoint_options.CheckpointOptions]=None) -> Optional[ops.Operation]: if options is not None and options.experimental_io_device is not None: raise ValueError('Specified experimental_io_device in DTensor checkpoint is not supported.') del options t...
Saves the saveable objects to a checkpoint with `file_prefix`. Also query the generated shards from the distributed DTensor SaveV2 ops and do a MergeV2 on those. Each op here is backed by a global_barrier to avoid racing from multiple clients. Args: file_prefix: A string or scalar string Tensor containing the prefix ...
github-repos
def flowwrite(flow, filename, quantize=False, concat_axis=0, *args, **kwargs): if (not quantize): with open(filename, 'wb') as f: f.write('PIEH'.encode('utf-8')) np.array([flow.shape[1], flow.shape[0]], dtype=np.int32).tofile(f) flow = flow.astype(np.float32) ...
Write optical flow to file. If the flow is not quantized, it will be saved as a .flo file losslessly, otherwise a jpeg image which is lossy but of much smaller size. (dx and dy will be concatenated horizontally into a single image if quantize is True.) Args: flow (ndarray): (h, w, 2) array of optical flow. filename (...
codesearchnet
def apply_mutation(module_path, operator, occurrence): module_ast = get_ast(module_path, python_version=operator.python_version) original_code = module_ast.get_code() visitor = MutationVisitor(occurrence, operator) mutated_ast = visitor.walk(module_ast) mutated_code = None if visitor.mutation_ap...
Apply a specific mutation to a file on disk. Args: module_path: The path to the module to mutate. operator: The `operator` instance to use. occurrence: The occurrence of the operator to apply. Returns: A `(unmutated-code, mutated-code)` tuple to the with-block. If there was no mutation performed, the `mutated-code` i...
codesearchnet
def get_estimator(output_dir, train_config, args): target_name = train_config['target_column'] if is_classification_model(args.model_type) and target_name not in \ train_config['categorical_columns']: raise ValueError('When using a classification model, the target must be a ' ...
Returns a tf learn estimator. We only support {DNN, Linear}Regressor and {DNN, Linear}Classifier. This is controlled by the values of model_type in the args. Args: output_dir: Modes are saved into outputdir/train train_config: our training config args: command line parameters Returns: TF lean estimator Raises: Valu...
juraj-google-style
def _ragged_tensor_to_string(string_tensor, summarize): if string_tensor.shape.rank == 1: pieces = string_tensor else: pieces = map_fn_lib.map_fn(lambda s: _ragged_tensor_to_string(s, summarize), string_tensor, fn_output_signature=tensor_lib.TensorSpec(None, dtypes.string)) if summarize not ...
Returns a scalar string tensor with the contents of `string_tensor`. Args: string_tensor: A potentially ragged tensor with dtype=string. summarize: Include only the first and last `summarize` elements of each dimension. If `-1` or `None`, then include all elements. Returns: A scalar string Tensor.
github-repos
def dump(ofp, *pb_objs, **kwargs):
    """Write serialized protobuf messages to a stream.

    Args:
        ofp (string or file-like object): output stream. A string is treated
            as a path and opened in binary write mode; a file-like object is
            written to directly (and not closed here).
        pb_objs (*protobuf.message.Message): protobuf message objects
            to be written.
        **kwargs: Extra arguments forwarded to `open` when `ofp` is a path.
    """
    # NOTE(review): the previous code called open(fileobj=ofp, ...) for the
    # non-string case, which the builtin open() does not accept, and passed
    # all messages to a single write() call.  Each message is serialized and
    # written individually instead.
    if isinstance(ofp, str):
        with open(ofp, mode='wb', **kwargs) as ostream:
            for obj in pb_objs:
                ostream.write(obj.SerializeToString())
    else:
        for obj in pb_objs:
            ofp.write(obj.SerializeToString())
Write to a stream. Args: ofp (string or file-like object): output stream. pb_objs (*protobuf.message.Message): list of protobuf message objects to be written.
juraj-google-style
def collective_diffusion_coefficient(self):
    """Returns the collective or "jump" diffusion coefficient, D_J.

    Args:
        None

    Returns:
        (Float): The collective diffusion coefficient, D_J, or None if the
        simulation has not been run.
    """
    if not self.has_run:
        return None
    return self.atoms.collective_dr_squared() / (6.0 * self.lattice.time)
Returns the collective or "jump" diffusion coefficient, D_J. Args: None Returns: (Float): The collective diffusion coefficient, D_J.
juraj-google-style
def aggregate(self, batch_outs, batch_start=None, batch_end=None):
    """Aggregates batch-level results into total results.

    Args:
        batch_outs: A list of batch-level outputs.
        batch_start: The start index of this batch. Always `None` if
            `use_steps` is `True`.
        batch_end: The end index of this batch. Always `None` if
            `use_steps` is `True`.

    Raises:
        NotImplementedError: Always; subclasses must provide the aggregation.
    """
    raise NotImplementedError('Must be implemented in subclasses.')
Aggregates batch-level results into total results. Args: batch_outs: A list of batch-level outputs. batch_start: The start index of this batch. Always `None` if `use_steps` is `True`. batch_end: The end index of this batch. Always `None` if `use_steps` is `True`.
github-repos
def __init__(self, dimensions, hidden_size):
    """Constructs a learnable multivariate diagonal normal cell.

    Args:
        dimensions: An integer corresponding to the dimensionality of the
            distribution.
        hidden_size: Dimensionality of the LSTM function parameters.
    """
    super(LearnableMultivariateNormalDiagCell, self).__init__()
    self.dimensions = dimensions
    self.hidden_size = hidden_size
    self.lstm_cell = tf.keras.layers.LSTMCell(hidden_size)
    # Output width is 2*dimensions — presumably one half for the location
    # and one half for the scale of the diagonal normal; confirm at call site.
    self.output_layer = tf.keras.layers.Dense(2*dimensions)
Constructs a learnable multivariate diagonal normal cell. Args: dimensions: An integer corresponding to the dimensionality of the distribution. hidden_size: Dimensionality of the LSTM function parameters.
juraj-google-style
def convert_transpose(params, w_name, scope_name, inputs, layers, weights, names): print('Converting transpose ...') if params['perm'][0] != 0: if inputs[0] in layers: print('!!! Cannot permute batch dimension. Result may be wrong !!!') layers[scope_name] = layers[inputs[0]]...
Convert transpose layer. Args: params: dictionary with layer parameters w_name: name prefix in state_dict scope_name: pytorch scope name inputs: pytorch node inputs layers: dictionary with keras tensors weights: pytorch state_dict names: use short names for keras layers
juraj-google-style
def pp_hex(raw, reverse=True):
    """Return a pretty-printed (hex style) version of a binary string.

    Args:
        raw (bytes): any sequence of bytes
        reverse (bool): True if output should be in reverse order.

    Returns:
        Hex string corresponding to input byte sequence.
    """
    octets = ['{:02x}'.format(v) for v in bytearray(raw)]
    if reverse:
        octets.reverse()
    return ''.join(octets)
Return a pretty-printed (hex style) version of a binary string. Args: raw (bytes): any sequence of bytes reverse (bool): True if output should be in reverse order. Returns: Hex string corresponding to input byte sequence.
juraj-google-style
def _FormatReturnOrExitToken(self, token_data):
    """Formats a return or exit token as a dictionary of values.

    Args:
        token_data (bsm_token_data_exit|bsm_token_data_return32|
            bsm_token_data_return64): AUT_EXIT, AUT_RETURN32 or AUT_RETURN64
            token data.

    Returns:
        dict[str, str]: token values.
    """
    status = token_data.status
    return {
        'error': bsmtoken.BSM_ERRORS.get(status, 'UNKNOWN'),
        'token_status': status,
        'call_status': token_data.return_value}
Formats a return or exit token as a dictionary of values. Args: token_data (bsm_token_data_exit|bsm_token_data_return32| bsm_token_data_return64): AUT_EXIT, AUT_RETURN32 or AUT_RETURN64 token data. Returns: dict[str, str]: token values.
codesearchnet
def _extract_params(self, kwargs, hyperparameters): init_params = dict() fit_params = dict() produce_params = dict() for (name, param) in hyperparameters.get('fixed', dict()).items(): if (name in kwargs): value = kwargs.pop(name) elif ('default' in param): value =...
Extract init, fit and produce params from kwargs. The `init_params`, `fit_params` and `produce_params` are extracted from the passed `kwargs` taking the metadata hyperparameters as a reference. During this extraction, make sure that all the required hyperparameters have been given and that nothing unexpected exists i...
codesearchnet
def tagscleanupdicts(configuration=None, url=None, keycolumn=5, failchained=True): if (not Tags._tags_dict): if (configuration is None): configuration = Configuration.read() with Download(full_agent=configuration.get_user_agent()) as downloader: if (url is None): ...
Get tags cleanup dictionaries Args: configuration (Optional[Configuration]): HDX configuration. Defaults to global configuration. url (Optional[str]): Url of tags cleanup spreadsheet. Defaults to None (internal configuration parameter). keycolumn (int): Column number of tag column in spreadsheet. Defaults to 5. failch...
codesearchnet
def forward(self, hidden_states: torch.Tensor, attention_mask: torch.Tensor, position_embeddings: Optional[torch.Tensor]=None, output_attentions: bool=False, **kwargs): residual = hidden_states if self.normalize_before: hidden_states = self.self_attn_layer_norm(hidden_states) hidden_states, attn_wei...
Args: hidden_states (`torch.FloatTensor`): input to the layer of shape `(batch, seq_len, embed_dim)` attention_mask (`torch.FloatTensor`): attention mask of size `(batch, 1, target_len, source_len)` where padding elements are indicated by very large negative values. position_embeddings (`torch.FloatTensor`, *optional*)...
github-repos
def download_software_version(version=None, synch=False): if not version: raise CommandExecutionError("Version option must not be none.") if not isinstance(synch, bool): raise CommandExecutionError("Synch option must be boolean..") if synch is True: query = {'type': 'op', ...
Download software packages by version number. Args: version(str): The version of the PANOS file to download. synch (bool): If true then the file will synch to the peer unit. CLI Example: .. code-block:: bash salt '*' panos.download_software_version 8.0.0 salt '*' panos.download_software_version 8.0.0 True
juraj-google-style
def __init__(self, n_clusters: int, batch_size: int, is_batched: bool=False):
    """Initializes preprocessing for a clustering transform.

    Args:
        n_clusters: Number of clusters.
        batch_size: Size of the batches handed to the clustering step.
        is_batched: Whether the incoming data is already batched; when False
            the transform is expected to batch it first.
    """
    super().__init__()
    self.n_clusters = n_clusters
    self.batch_size = batch_size
    self.is_batched = is_batched
Preprocessing for Clustering Transformation The clustering transform expects batches for performance reasons, therefore this batches the data and converts it to numpy arrays, which are accepted by sklearn. This transform also adds the same key to all batches, such that only 1 state is created and updated during cluster...
github-repos
def read_stream(self, file: IO, data_stream: DataStream) -> Reply:
    """Read from the data stream.

    Args:
        file: A destination file object or a stream writer.
        data_stream: The stream of which to read from.

    Coroutine.

    Returns:
        Reply: The final reply.
    """
    # Drain the data connection into `file` first.
    (yield from data_stream.read_file(file=file))
    # The control connection must then confirm the transfer completed.
    reply = (yield from self._control_stream.read_reply())
    self.raise_if_not_match('End stream', ReplyCodes.closing_data_connection, reply)
    data_stream.close()
    return reply
Read from the data stream. Args: file: A destination file object or a stream writer. data_stream: The stream of which to read from. Coroutine. Returns: Reply: The final reply.
codesearchnet
def _build_zmat(self, construction_table): c_table = construction_table default_cols = ['atom', 'b', 'bond', 'a', 'angle', 'd', 'dihedral'] optional_cols = list(set(self.columns) - {'atom', 'x', 'y', 'z'}) zmat_frame = pd.DataFrame(columns=default_cols + optional_cols, ...
Create the Zmatrix from a construction table. Args: Construction table (pd.DataFrame): Returns: Zmat: A new instance of :class:`Zmat`.
juraj-google-style
def write(self, data):
    """Write *n* bytes to the subprocess' input channel.

    Args:
        data(bytes): The data to write.

    Raises:
        EOFError: If the process exited.
    """
    proc = self._process
    # poll() refreshes returncode before the liveness check below.
    proc.poll()
    if proc.returncode is not None:
        raise EOFError('Process ended')
    proc.stdin.write(data)
Write *n* bytes to the subprocess' input channel. Args: data(bytes): The data to write. Raises: EOFError: If the process exited.
juraj-google-style
def extract_possible_actions(self, state_arr): agent_x, agent_y = np.where(state_arr[-1] == 1) agent_x, agent_y = agent_x[0], agent_y[0] possible_action_arr = None for x, y in [ (-1, 0), (1, 0), (0, -1), (0, 1), (0, 0) ]: next_x = agent_x + x ...
Extract possible actions. Args: state_arr: `np.ndarray` of state. Returns: `np.ndarray` of actions. The shape is:( `batch size corresponded to each action key`, `channel that is 1`, `feature points1`, `feature points2` )
juraj-google-style
def by_geopoint(self, lat, long):
    """Perform a Yelp Neighborhood API Search based on a geopoint.

    Args:
        lat - geopoint latitude
        long - geopoint longitude
    """
    _header, body = self._http_request(self.BASE_URL, lat=lat, long=long)
    return json.loads(body)
Perform a Yelp Neighborhood API Search based on a geopoint. Args: lat - geopoint latitude long - geopoint longitude
juraj-google-style
def unhide_tool(self, context_name, tool_name):
    """Unhide a tool so that it may be exposed in a suite.

    Note that unhiding a tool doesn't guarantee it can be seen - a tool of
    the same name from a different context may be overriding it.

    Args:
        context_name (str): Context containing the tool.
        tool_name (str): Name of tool to unhide.
    """
    context_data = self._context(context_name)
    hidden = context_data["hidden_tools"]
    if tool_name not in hidden:
        return
    hidden.remove(tool_name)
    self._flush_tools()
Unhide a tool so that it may be exposed in a suite. Note that unhiding a tool doesn't guarantee it can be seen - a tool of the same name from a different context may be overriding it. Args: context_name (str): Context containing the tool. tool_name (str): Name of tool to unhide.
juraj-google-style
def attach_socket(self, **kwargs):
    """Like :py:meth:`attach`, but returns the underlying socket-like object
    for the HTTP request.

    Args:
        params (dict): Dictionary of request parameters (e.g. ``stdout``,
            ``stderr``, ``stream``).
        ws (bool): Use websockets instead of raw HTTP.

    Raises:
        :py:class:`docker.errors.APIError`
            If the server returns an error.
    """
    api_client = self.client.api
    return api_client.attach_socket(self.id, **kwargs)
Like :py:meth:`attach`, but returns the underlying socket-like object for the HTTP request. Args: params (dict): Dictionary of request parameters (e.g. ``stdout``, ``stderr``, ``stream``). ws (bool): Use websockets instead of raw HTTP. Raises: :py:class:`docker.errors.APIError` If the server returns an error.
juraj-google-style
def _show_all(saved_model_dir): saved_model = saved_model_utils.read_saved_model(saved_model_dir) for meta_graph_def in sorted(saved_model.meta_graphs, key=lambda meta_graph_def: list(meta_graph_def.meta_info_def.tags)): tag_set = meta_graph_def.meta_info_def.tags print("\nMetaGraphDef with tag-...
Prints tag-set, ops, SignatureDef, and Inputs/Outputs of SavedModel. Prints all tag-set, ops, SignatureDef and Inputs/Outputs information stored in SavedModel directory. Args: saved_model_dir: Directory containing the SavedModel to inspect.
github-repos
def remove_child(self, c: 'AbstractSyntaxTree') -> None:
    """Removes a child from the receiver and sets its parent to `None`.

    Args:
        c: The child to remove. By default, compared using pointer equality.

    Raises:
        ValueError in the event that the child does not belong to the
        underlying list of children.
    """
    children = self._children
    if children is None:
        raise ValueError(f'No children belonging to {self!r}.')
    children.remove(c)
    c.parent = None
Removes a child from the reciever and sets its parent to `None`. Args: c: The child to remove. By default, compared using pointer equality. Raises: ValueError in the event that the child does not being to the underlying list of children.
github-repos
def PublishEvent(cls, event_name, msg, token=None):
    """Publish the message into all listeners of the event.

    We send the message to all event handlers which contain this string in
    their EVENT static member. This allows the event to be sent to multiple
    interested listeners.

    Args:
        event_name: An event name.
        msg: The message to send to the event handler.
        token: ACL token.
    """
    events = {event_name: [msg]}
    cls.PublishMultipleEvents(events, token=token)
Publish the message into all listeners of the event. We send the message to all event handlers which contain this string in their EVENT static member. This allows the event to be sent to multiple interested listeners. Args: event_name: An event name. msg: The message to send to the event handler. token: ACL token. R...
juraj-google-style
def alwaysThrew(self, error_type=None):
    """Determining whether the specified exception is the ONLY thrown exception

    Args:
        error_type: None: checking without specified exception
                    Specified Exception

    Returns:
        Boolean
    """
    if self.callCount == 0:
        return False
    if not error_type:
        # Fixed idiom: return the comparison directly instead of
        # `True if ... else False`.
        return len(self.exceptions) == self.callCount
    return uch.obj_in_list_always(self.exceptions, error_type)
Determining whether the specified exception is the ONLY thrown exception Args: error_type: None: checking without specified exception Specified Exception Return: Boolean
juraj-google-style
def load_hat(self, path):
    """Loads the hat from a picture at path.

    Args:
        path: The path to load from

    Returns:
        The hat data.

    Raises:
        ValueError: If no image could be read from `path`.
    """
    image = cv2.imread(path, cv2.IMREAD_UNCHANGED)
    if image is None:
        raise ValueError('No hat image found at `{}`'.format(path))
    # Re-order OpenCV's BGRA channels into RGBA.
    blue, green, red, alpha = cv2.split(image)
    return cv2.merge((red, green, blue, alpha))
Loads the hat from a picture at path. Args: path: The path to load from Returns: The hat data.
juraj-google-style
def cifar_generator(cifar_version, tmp_dir, training, how_many, start_from=0): if (cifar_version == 'cifar10'): url = _CIFAR10_URL train_files = _CIFAR10_TRAIN_FILES test_files = _CIFAR10_TEST_FILES prefix = _CIFAR10_PREFIX image_size = _CIFAR10_IMAGE_SIZE label_key =...
Image generator for CIFAR-10 and 100. Args: cifar_version: string; one of "cifar10" or "cifar100" tmp_dir: path to temporary storage directory. training: a Boolean; if true, we use the train set, otherwise the test set. how_many: how many images and labels to generate. start_from: from which image to start. Returns: ...
codesearchnet
def __init__(self, max_iterations, unroll_loop=False): assert max_iterations >= 0 self.max_iterations = max_iterations assert isinstance(unroll_loop, bool) self.unroll_loop = unroll_loop super(Iterative, self).__init__() self.initialize = tf.make_temp...
Creates a new iterative solver instance. Args: max_iterations: Maximum number of iterations before termination. unroll_loop: Unrolls the TensorFlow while loop if true.
juraj-google-style
def email_address(self, address, owner=None, **kwargs):
    """Create the Email Address TI object.

    Args:
        owner: Owner organization for the indicator.
        address: The email address value.
        **kwargs: Additional keyword arguments forwarded to `EmailAddress`.

    Return:
        An `EmailAddress` TI object.
    """
    ti_object = EmailAddress(self.tcex, address, owner=owner, **kwargs)
    return ti_object
Create the Email Address TI object. Args: owner: address: **kwargs: Return:
codesearchnet
def html_for_modules_method(method_name, *args, **kwargs):
    """Returns an HTML snippet for a Modules API method.

    Args:
        method_name: A string containing a Modules API method.
        args: Positional arguments to be passed to the method.
        kwargs: Keyword arguments to be passed to the method.

    Returns:
        String HTML representing the Modules API method and value.
    """
    api_method = getattr(modules, method_name)
    result = api_method(*args, **kwargs)
    return KEY_VALUE_TEMPLATE.format(method_name, result)
Returns an HTML snippet for a Modules API method. Args: method_name: A string containing a Modules API method. args: Positional arguments to be passed to the method. kwargs: Keyword arguments to be passed to the method. Returns: String HTML representing the Modules API method and value.
juraj-google-style
def _begin_operation_action(self, action): conn_key = action.data['id'] callback = action.data['callback'] if self._get_connection_state(conn_key) != self.Idle: callback(conn_key, self.id, False, 'Cannot start operation, connection is not idle') return ...
Begin an attempted operation. Args: action (ConnectionAction): the action object describing what we are operating on
juraj-google-style
def _add_arg_java(self, key, value, mask=False): if isinstance(value, bool): value = int(value) self._data[key] = value self._args.append('{}{}={}'.format('-D', key, value)) self._args_quoted.append(self.quote('{}{}={}'.format('-D', key, value))) if mask: value = ('x' * len(str(value...
Add CLI Arg formatted specifically for Java. Args: key (string): The CLI Args key (e.g., --name). value (string): The CLI Args value (e.g., bob). mask (boolean, default:False): Indicates whether no mask value.
codesearchnet
def read_label_list(path):
    """Read labels from an Audacity label file into a LabelList.

    Args:
        path (str): Path to the Audacity label file.

    Returns:
        audiomate.annotations.LabelList: Label list containing the labels.
    """
    label_list = annotations.LabelList()
    for record in read_label_file(path):
        # Records are indexed as (start, end, value) — keep positional
        # access so extra trailing fields, if any, are ignored.
        start, end, value = record[0], record[1], record[2]
        label_list.add(annotations.Label(value, start=start, end=end))
    return label_list
Reads labels from an Audacity label file and returns them wrapped in a :py:class:`audiomate.annotations.LabelList`. Args: path (str): Path to the Audacity label file Returns: audiomate.annotations.LabelList: Label list containing the labels
juraj-google-style
def __init__(self, project_name, instance_name, table_name): self.btspec = BigtableSpec(project_name, instance_name, table_name) self.bt_table = bigtable.Client( self.btspec.project, admin=True).instance( self.btspec.instance).table(self.btspec.table) self.tf...
Constructor. Args: project_name: string name of GCP project having table. instance_name: string name of CBT instance in project. table_name: string name of CBT table in instance.
juraj-google-style
def tell(self):
    """Tell the stream's current offset.

    Returns:
        The current offset in reading this stream.

    Raises:
        ValueError: When this stream is closed.
    """
    # _checkClosed raises if the stream has already been closed.
    self._checkClosed()
    offset = self._position
    return offset
Tell the stream's current offset. Returns: current offset in reading this stream. Raises: ``ValueError``: When this stream is closed.
github-repos
def get_intersection(self, range_): result = [] for entry in self.entries: (package, value) = entry if (value is None): continue if (package.version not in range_): continue if isinstance(value, list): variants = value entry_ = _Pac...
Get a list of variants that intersect with the given range. Args: range_ (`VersionRange`): Package version range. Returns: List of `_PackageEntry` objects.
codesearchnet
def register_event(self, name, callback, validator): async def _validate_and_call(message): payload = message.get('payload') try: payload = validator.verify(payload) except ValidationError: self._logger.warning('Dropping invalid payload for event %s, payload=%s', nam...
Register a callback to receive events. Every event with the matching name will have its payload validated using validator and then will be passed to callback if validation succeeds. Callback must be a normal callback function, coroutines are not allowed. If you need to run a coroutine you are free to schedule it fro...
codesearchnet
def _peer_get_bfd(self, tx, rx, multiplier): tx = self._callback(tx, handler='get_config') rx = self._callback(rx, handler='get_config') multiplier = self._callback(multiplier, handler='get_config') tx = pynos.utilities.return_xml(str(tx)) rx = pynos.utilities.return_xml...
Get and merge the `bfd` config from global BGP. You should not use this method. You probably want `BGP.bfd`. Args: tx: XML document with the XML to get the transmit interval. rx: XML document with the XML to get the receive interval. multiplier: XML document with the XML to get the interval multiplier. Returns: Merg...
juraj-google-style
def load_institute(adapter, internal_id, display_name, sanger_recipients=None): institute_obj = build_institute(internal_id=internal_id, display_name=display_name, sanger_recipients=sanger_recipients) log.info('Loading institute {0} with display name {1} into database'.format(internal_id, display_name)) ada...
Load an institute into the database Args: adapter(MongoAdapter) internal_id(str) display_name(str) sanger_recipients(list(email))
codesearchnet
def wait_for_stateful_block_init(context, mri, timeout=DEFAULT_TIMEOUT):
    """Wait until a Block backed by a StatefulController has initialized.

    Args:
        context (Context): The context to use to make the child block.
        mri (str): The mri of the child block.
        timeout (float): The maximum time to wait.
    """
    state_path = [mri, 'state', 'value']
    failure_states = [StatefulStates.FAULT, StatefulStates.DISABLED]
    context.when_matches(
        state_path,
        StatefulStates.READY,
        bad_values=failure_states,
        timeout=timeout,
    )
Wait until a Block backed by a StatefulController has initialized Args: context (Context): The context to use to make the child block mri (str): The mri of the child block timeout (float): The maximum time to wait
codesearchnet
def add_attribute_label(self, attribute_id, label):
    """Add a security label to an attribute of this object.

    Args:
        attribute_id: The id of the attribute to label.
        label: The security label to apply.

    Returns:
        The response from the API request, or None when this object
        cannot be updated.
    """
    if self.can_update():
        return self.tc_requests.add_attribute_label(
            self.api_type,
            self.api_sub_type,
            self.unique_id,
            attribute_id,
            label,
            owner=self.owner,
        )
    # Object is not updatable: report error 910 and bail out.
    self._tcex.handle_error(910, [self.type])
    return None
Adds a security label to an attribute Args: attribute_id: label: Returns: A response json
codesearchnet
def _map_args(self, node: 'cfg.CFGNode', args: function.Args) -> 'dict[str, cfg.Variable]': posargs = [u.AssignToNewVariable(node) for u in args.posargs] kws = {k: u.AssignToNewVariable(node) for k, u in args.namedargs.items()} sig = self.signature callargs = {name: self.ctx.program.NewVariable(default....
Map call args to function args. This emulates how Python would map arguments of function calls. It takes care of keyword parameters, default parameters, and *args and **kwargs. Args: node: The current CFG node. args: The arguments. Returns: A dictionary, mapping strings (parameter names) to cfg.Variable. Raises: fu...
github-repos
def urlEncodeAndJoin(self, seq, sepr=','): try: from urllib.parse import quote_plus as encode return sepr.join([encode(x, encoding=CHARSET_UTF8) for x in seq]) except ImportError: from urllib import quote as encode return sepr.join([i for i in map...
sepr.join(urlencode(seq)) Args: seq: string list to be urlencoded sepr: join seq with sepr Returns: str
juraj-google-style
def patchify(self, pixel_values, interpolate_pos_encoding: bool=False): patch_size, num_channels = (self.config.patch_size, self.config.num_channels) if shape_list(pixel_values)[1] == num_channels: pixel_values = tf.transpose(pixel_values, perm=(0, 2, 3, 1)) if not interpolate_pos_encoding: ...
Args: pixel_values (`tf.Tensor` of shape `(batch_size, height, width, num_channels)` or `(batch_size, num_channels, height, width)`): Pixel values. interpolate_pos_encoding (`bool`, default `False`): interpolation flag passed during the forward pass. Returns: `tf.Tensor` of shape `(batch_size, num_patches, patch_size*...
github-repos
def lstat(self, entry_path, dir_fd=None):
    """Return the os.stat-like result for entry_path, not following symlinks.

    Args:
        entry_path: Path to the filesystem object to retrieve.
        dir_fd: If not None, the file descriptor of a directory, with
            entry_path being relative to this directory (Python 3.3+).

    Returns:
        The stat result object for the entry itself (symlinks are not
        resolved).
    """
    resolved_path = self._path_with_dir_fd(entry_path, self.lstat, dir_fd)
    return self.filesystem.stat(resolved_path, follow_symlinks=False)
Return the os.stat-like tuple for entry_path, not following symlinks. Args: entry_path: path to filesystem object to retrieve. dir_fd: If not `None`, the file descriptor of a directory, with `entry_path` being relative to this directory. New in Python 3.3. Returns: the FakeStatResult object corresponding to `entry_p...
juraj-google-style
def __init__(self, prefix): self.bed = PyPlink(prefix) self.bim = self.bed.get_bim() self.fam = self.bed.get_fam() self.bim["multiallelic"] = False self.bim.loc[ self.bim.duplicated(["chrom", "pos"], keep=False), "multiallelic" ]...
Binary plink file reader. Args: prefix (str): the prefix of the Plink binary files.
juraj-google-style
def assert_global_step(global_step_tensor): if not (isinstance(global_step_tensor, variables.Variable) or isinstance(global_step_tensor, tensor.Tensor) or resource_variable_ops.is_resource_variable(global_step_tensor)): raise TypeError('Existing "global_step" must be a Variable or Tensor: %s.' % global_step...
Asserts `global_step_tensor` is a scalar int `Variable` or `Tensor`. Args: global_step_tensor: `Tensor` to test.
github-repos
def set_state(self, vid, value=None, default=False, disable=False):
    """Configure the VLAN state.

    Args:
        vid (str): The VLAN ID to configure.
        value (str): The value to set the vlan state to.
        default (bool): Configure the vlan state to its default value.
        disable (bool): Negate the vlan state.

    Returns:
        True if the operation was successful otherwise False.
    """
    commands = self.command_builder(
        'state', value=value, default=default, disable=disable)
    return self.configure_vlan(vid, commands)
Configures the VLAN state EosVersion: 4.13.7M Args: vid (str): The VLAN ID to configure value (str): The value to set the vlan state to default (bool): Configures the vlan state to its default value disable (bool): Negates the vlan state Returns: True if the operation was successful otherwise False
codesearchnet
def tar_add_bytes(tf, filename, bytestring):
    """Add an in-memory file to a tar archive.

    Args:
        tf (tarfile.TarFile): Tarfile to add the file to.
        filename (str): Path of the entry within the tar file.
        bytestring (bytes or str): File contents. A str must be
            ascii-encodable.
    """
    if isinstance(bytestring, bytes):
        payload = bytestring
    else:
        payload = bytestring.encode('ascii')
    info = tarfile.TarInfo(filename)
    info.size = len(payload)
    tf.addfile(info, io.BytesIO(payload))
Add a file to a tar archive Args: tf (tarfile.TarFile): tarfile to add the file to filename (str): path within the tar file bytestring (bytes or str): file contents. Must be :class:`bytes` or ascii-encodable :class:`str`
juraj-google-style
def timeRange( start: datetime.time, end: datetime.time, step: float) -> Iterator[datetime.datetime]: assert step > 0 start = _fillDate(start) end = _fillDate(end) delta = datetime.timedelta(seconds=step) t = start while t < datetime.datetime.now(): t += delta wh...
Iterator that waits periodically until certain time points are reached while yielding those time points. Args: start: Start time, can be specified as datetime.datetime, or as datetime.time in which case today is used as the date end: End time, can be specified as datetime.datetime, or as datetime.time in which case to...
juraj-google-style
def __call__(self, *args): if len(self.formatters) == 0: self.setup(*args) row_cells = [] if self.rownum: row_cells.append(0) if self.timestamp: row_cells.append(datetime.datetime.now()) if self.time_diff: row_cells.appen...
Prints a formatted row Args: args: row cells
juraj-google-style
def create_query(self, fields=None): if fields is None: return Query(self.fields) non_contained_fields = set(fields) - set(self.fields) if non_contained_fields: raise BaseLunrException( "Fields {} are not part of the index", non_contained_fields ...
Convenience method to create a Query with the Index's fields. Args: fields (iterable, optional): The fields to include in the Query, defaults to the Index's `all_fields`. Returns: Query: With the specified fields or all the fields in the Index.
juraj-google-style
def make_supercells_with_defects(self, scaling_matrix): scs = [] sc = self._structure.copy() sc.make_supercell(scaling_matrix) scs.append(sc) for ids, defect_site in enumerate(self._defect_sites): sc_with_inter = sc.copy() sc_with_inter.append( ...
Generate a sequence of supercells in which each supercell contains a single interstitial, except for the first supercell in the sequence which is a copy of the defect-free input structure. Args: scaling_matrix (3x3 integer array): scaling matrix to transform the lattice vectors. Returns: scs ([Structure]): sequence of...
juraj-google-style
def CopyToDict(self):
    """Copies the event tag to a dictionary.

    Returns:
        dict[str, object]: event tag attributes; 'comment' is included
        only when it is truthy.
    """
    attributes = dict(labels=self.labels)
    comment = self.comment
    if comment:
        attributes['comment'] = comment
    return attributes
Copies the event tag to a dictionary. Returns: dict[str, object]: event tag attributes.
codesearchnet
def convert_timedelta(duration):
    """Convert a duration into its component time units.

    Args:
        duration (datetime.timedelta): time duration to convert.

    Returns:
        tuple: (days, hours, minutes, seconds) as integers. Days come
        from duration.days; hours, minutes and seconds are the remainder
        within the final day.
    """
    # duration.seconds is already the within-day remainder (0..86399).
    days, total_seconds = duration.days, duration.seconds
    # Original code assigned hours = total_seconds and minutes =
    # total_seconds % 3600, i.e. missed the // 3600 and // 60 divisions.
    hours = total_seconds // 3600
    minutes = (total_seconds % 3600) // 60
    seconds = total_seconds % 60
    return days, hours, minutes, seconds
Summary: Convert duration into component time units Args: :duration (datetime.timedelta): time duration to convert Returns: days, hours, minutes, seconds | TYPE: tuple (integers)
juraj-google-style
def _original_path(self, path): def components_to_path(): if (len(path_components) > len(normalized_components)): normalized_components.extend(path_components[len(normalized_components):]) sep = self._path_separator(path) normalized_path = sep.join(normalized_components) ...
Return a normalized case version of the given path for case-insensitive file systems. For case-sensitive file systems, return path unchanged. Args: path: the file path to be transformed Returns: A version of path matching the case of existing path elements.
codesearchnet
def generate_json_schema(cls, schema, context=DEFAULT_DICT):
    """Generate a JSON Schema from a Marshmallow schema.

    Args:
        schema (marshmallow.Schema|str): The Marshmallow schema, or the
            Python path to one, to create the JSON schema for.
        context (dict): Optional context passed to the dumping schema
            instance.

    Returns:
        The dumped JSON schema data.
    """
    resolved_schema = cls._get_schema(schema)
    dumper = cls(context=context)
    return dumper.dump(resolved_schema).data
Generate a JSON Schema from a Marshmallow schema. Args: schema (marshmallow.Schema|str): The Marshmallow schema, or the Python path to one, to create the JSON schema for. Keyword Args: file_pointer (file, optional): The path or pointer to the file to write this schema to. If not provided, the schema will be dumped to...
juraj-google-style
def plot(self, **plot_kwargs: Any) -> None: fig = plt.figure() plt.plot(self._rabi_angles, self._excited_state_probs, 'ro-', figure=fig, **plot_kwargs) plt.xlabel(r"Rabi Angle (Radian)", figure=fig) plt.ylabel('Excited State Probability', figure=fig) fig...
Plots excited state probability vs the Rabi angle (angle of rotation around the x-axis). Args: **plot_kwargs: Arguments to be passed to matplotlib.pyplot.plot.
juraj-google-style
def Print(self, output_writer): if self._date_time_ranges: for date_time_range in self._date_time_ranges: if date_time_range.start_date_time is None: end_time_string = date_time_range.end_date_time.CopyToDateTimeString() output_writer.Write('\t{0:s} after {1:s}\n'.format( ...
Prints a human readable version of the filter. Args: output_writer (CLIOutputWriter): output writer.
juraj-google-style
def ones(shape, dtype=None, **kwargs):
    """Create an array of given shape and type, filled with ones.

    Args:
        shape (sequence of ints): Shape of the array.
        dtype (data-type, optional): Desired data-type for the array.
        **kwargs: Other arguments of the array (coords, attrs, and name).

    Returns:
        array (decode.array): Decode array filled with ones.
    """
    filled = np.ones(shape, dtype)
    return dc.array(filled, **kwargs)
Create an array of given shape and type, filled with ones. Args: shape (sequence of ints): 2D shape of the array. dtype (data-type, optional): Desired data-type for the array. kwargs (optional): Other arguments of the array (*coords, attrs, and name). Returns: array (decode.array): Decode array filled with ones.
codesearchnet
async def gather(self, *cmds: str) -> Tuple[int]:
    """Spawn subprocesses for the given commands and await their completion.

    Note:
        The same `max_concurrency` restriction that applies to `spawn`
        also applies here.

    Args:
        *cmds: Command strings to run as subprocesses.

    Returns:
        The exit codes of the spawned subprocesses, in the order the
        commands were passed.
    """
    waiters = [proc.wait_done() for proc in self.spawn(*cmds)]
    return await asyncio.gather(*waiters)
Coroutine to spawn subprocesses and block until completion. Note: The same `max_concurrency` restriction that applies to `spawn` also applies here. Returns: The exit codes of the spawned subprocesses, in the order they were passed.
codesearchnet
def compile_protofile(proto_file_path): out_file = tempfile.mkstemp()[1] try: subprocess.check_output(['protoc', '--include_source_info', '--descriptor_set_out', out_file, proto_file_path]) except subprocess.CalledProcessError as...
Compile proto file to descriptor set. Args: proto_file_path: Path to proto file to compile. Returns: Path to file containing compiled descriptor set. Raises: SystemExit if the compilation fails.
juraj-google-style
def _click(x, y, button): if button == 'left': try: _sendMouseEvent(MOUSEEVENTF_LEFTCLICK, x, y) except (PermissionError, OSError): pass elif button == 'middle': try: _sendMouseEvent(MOUSEEVENTF_MIDDLECLICK, x, y) except (PermissionError,...
Send the mouse click event to Windows by calling the mouse_event() win32 function. Args: button (str): The mouse button, either 'left', 'middle', or 'right' x (int): The x position of the mouse event. y (int): The y position of the mouse event. Returns: None
juraj-google-style