code
stringlengths
20
4.93k
docstring
stringlengths
33
1.27k
source
stringclasses
3 values
def wait_for_prompt(self, timeout_s=None):
    """Block until the user answers the current prompt.

    Args:
        timeout_s: Seconds to wait before raising PromptUnansweredError;
            None waits (effectively) forever.

    Returns:
        A string response, or the empty string if text_input was False.

    Raises:
        PromptUnansweredError: Timed out waiting for the user to respond.
    """
    with self._cond:
        if self._prompt:
            # One year in seconds stands in for "no timeout".
            wait_s = 3600 * 24 * 365 if timeout_s is None else timeout_s
            self._cond.wait(wait_s)
        if self._response is None:
            raise PromptUnansweredError
        return self._response
Wait for the user to respond to the current prompt. Args: timeout_s: Seconds to wait before raising a PromptUnansweredError. Returns: A string response, or the empty string if text_input was False. Raises: PromptUnansweredError: Timed out waiting for the user to respond.
juraj-google-style
def _calc_rms(mol1, mol2, clabel1, clabel2): obmol1 = BabelMolAdaptor(mol1).openbabel_mol obmol2 = BabelMolAdaptor(mol2).openbabel_mol cmol1 = ob.OBMol() for i in clabel1: oa1 = obmol1.GetAtom(i) a1 = cmol1.NewAtom() a1.SetAtomicNum(oa1.GetAt...
Calculate the RMSD. Args: mol1: The first molecule. OpenBabel OBMol or pymatgen Molecule object mol2: The second molecule. OpenBabel OBMol or pymatgen Molecule object clabel1: The atom indices that can reorder the first molecule to uniform atom order clabel2: The atom indices that can reorder the second molecule to un...
juraj-google-style
def to_json_file(self, json_file_path: Union[str, os.PathLike]):
    """Save this instance's configuration to a JSON file.

    Args:
        json_file_path (Union[str, os.PathLike]): Path of the JSON file in
            which this configuration instance's parameters will be saved.
    """
    with open(json_file_path, 'w', encoding='utf-8') as writer:
        # Sorted keys and a trailing newline keep the output diff-friendly.
        payload = json.dumps(self.to_dict(), indent=2, sort_keys=True)
        writer.write(payload + '\n')
Save this instance to a JSON file. Args: json_file_path (Union[str, os.PathLike]): Path to the JSON file in which this configuration instance's parameters will be saved.
github-repos
def quota(self):
    """Report upload usage for this client.

    Returns:
        tuple: (number of uploaded tracks, number of tracks allowed).
    """
    state_response = self._call(mm_calls.ClientState, self.uploader_id)
    state = state_response.body.clientstate_response
    return (state.total_track_count, state.locker_track_limit)
Get the uploaded track count and allowance. Returns: tuple: Number of uploaded tracks, number of tracks allowed.
codesearchnet
def Get(self, request, global_params=None):
    """Gets information about a snapshot.

    Args:
        request: (DataflowProjectsSnapshotsGetRequest) input message.
        global_params: (StandardQueryParameters, default: None) global
            arguments.

    Returns:
        (Snapshot) The response message.
    """
    method_config = self.GetMethodConfig('Get')
    return self._RunMethod(method_config, request, global_params=global_params)
Gets information about a snapshot. Args: request: (DataflowProjectsSnapshotsGetRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (Snapshot) The response message.
github-repos
def make_message(self, data):
    """Build a Message instance from encoded payload data.

    The payload is decoded via this transport's codec, then used to
    construct a Message whose meta dict is updated from the payload.

    Returns:
        Message: the decoded message object.
    """
    decoded = self.codec.loads(data)
    message = Message(
        decoded.get('data'),
        *decoded.get('args', []),
        **decoded.get('kwargs', {})
    )
    message.meta.update(decoded.get('meta'))
    self.trigger('make_message', decoded, message)
    return message
Create a Message instance from data, data will be loaded via munge according to the codec specified in the transport_content_type attribute Returns: Message: message object
codesearchnet
def _replace_tensors_by_numpy_ndarrays(repr_ds_map: rd.RepresentativeDatasetMapping) -> None:
    """Replaces tf.Tensors in representative samples by evaluated numpy arrays.

    Assumes the tensors were created in the default Graph; evaluating
    tensors created in a different graph will raise an error.

    Args:
        repr_ds_map: SignatureDef key -> RepresentativeDataset mapping,
            mutated in place.
    """
    with session.Session() as sess:
        for key in repr_ds_map:
            dataset = repr_ds_map[key]
            repr_ds_map[key] = rd.replace_tensors_by_numpy_ndarrays(dataset, sess)
Replaces tf.Tensors by their evaluated numpy arrays. This assumes that tf.Tensors in representative samples are created in the default Graph. It will raise an error if tensors are created in a different graph. Args: repr_ds_map: SignatureDef key -> RepresentativeDataset mapping.
github-repos
def calc_crc16(buf): crc_table = [0x0000, 0xc0c1, 0xc181, 0x0140, 0xc301, 0x03c0, 0x0280, 0xc241, 0xc601, 0x06c0, 0x0780, 0xc741, 0x0500, 0xc5c1, 0xc481, 0x0440, 0xcc01, 0x0cc0, 0x0d80, 0xcd41, 0x0f00, 0xcfc1, 0xce81, 0x0e40, 0x0a00, 0xcac1...
Drop in pure python replacement for ekmcrc.c extension. Args: buf (bytes): String or byte array (implicit Python 2.7 cast) Returns: str: 16 bit CRC per EKM Omnimeters formatted as hex string.
juraj-google-style
def _get_course_content(course_id, course_url, sailthru_client, site_code, config): cache_key = "{}:{}".format(site_code, course_url) response = cache.get(cache_key) if not response: try: sailthru_response = sailthru_client.api_get("content", {"id": course_url}) if ...
Get course information using the Sailthru content api or from cache. If there is an error, just return with an empty response. Arguments: course_id (str): course key of the course course_url (str): LMS url for course info page. sailthru_client (object): SailthruClient site_code (str): site code config (dict): config ...
juraj-google-style
def asdict_with_event(self):
    """Snapshot this object as a dict along with an update event.

    Returns:
        Tuple of (state dict, threading.Event). The event is guaranteed to
        be set if an update is triggered after the dict was generated.
    """
    update_event = threading.Event()
    with self._lock:
        # Register the event before snapshotting so no update can slip
        # between the snapshot and the registration.
        self._update_events.add(update_event)
        snapshot = self._asdict()
    return (snapshot, update_event)
Get a dict representation of this object and an update event. Returns: state: Dict representation of this object. update_event: An event that is guaranteed to be set if an update has been triggered since the returned dict was generated.
codesearchnet
def make_pool3d_tests(pool_op): def f(options, expected_tf_failures=0): test_parameters = [{'ksize': [[1, 1, 1, 1, 1], [1, 2, 2, 2, 1], [1, 2, 3, 4, 1]], 'strides': [[1, 1, 1, 1, 1], [1, 2, 1, 2, 1], [1, 2, 2, 4, 1]], 'input_shape': [[1, 1, 1, 1, 1], [1, 16, 15, 14, 1], [3, 16, 15, 14, 3]], 'paddi...
Make a set of tests to do pooling. Args: pool_op: TensorFlow pooling operation to test i.e. `tf.nn.max_pool3d`. Returns: A function representing the true generator (after curried pool_op).
github-repos
def add_string_pairs_from_text_view_element(xib_file, results, text_view, special_ui_components_prefix): text_view_entry_comment = extract_element_internationalized_comment(text_view) if text_view_entry_comment is None: return if text_view.hasAttribute('usesAttributedText') and text_view.attri...
Adds string pairs from a textview element. Args: xib_file (str): Path to the xib file. results (list): The list to add the results to. text_view(element): The textview element from the xib, to extract the string pairs from. special_ui_components_prefix(str): A custom prefix for internationalize component to allow (def...
juraj-google-style
def acquire(self, uuid_path, subnet=None): try: with self._create_lock(): if subnet: LOGGER.debug('Trying to acquire subnet {}'.format(subnet)) acquired_subnet = self._acquire_given_subnet(uuid_path, subnet) else: LOGGER.debug('Trying t...
Lease a free subnet for the given uuid path. If subnet is given, try to lease that subnet, otherwise try to lease a free subnet. Args: uuid_path (str): Path to the uuid file of a :class:`lago.Prefix` subnet (str): A subnet to lease. Returns: netaddr.IPAddress: An object which represents the subnet. Raises: LagoSubnet...
codesearchnet
def interpolate(features, hparams, decode_hp): (inputs, targets) = (features['inputs'], features['targets']) inputs = tf.unstack(inputs, axis=1) targets = tf.unstack(targets, axis=1) coeffs = np.linspace(0.0, 1.0, decode_hp.num_interp) (first_frame, last_frame) = (inputs[0], targets[(- 1)]) (fir...
Interpolate between the first input frame and last target frame. Args: features: dict of tensors hparams: HParams, training hparams. decode_hp: HParams, decode hparams. Returns: images: interpolated images, 4-D Tensor, shape=(num_interp, H, W, C) first_frame: image, 3-D Tensor, shape=(1, H, W, C) last_frame: image, 3-...
codesearchnet
def write(self, obj: BioCDocument or BioCPassage or BioCSentence): if self.level == DOCUMENT and not isinstance(obj, BioCDocument): raise ValueError if self.level == PASSAGE and not isinstance(obj, BioCPassage): raise ValueError if self.level == SENTENCE and not ...
Encode and write a single object. Args: obj: an instance of BioCDocument, BioCPassage, or BioCSentence Returns:
juraj-google-style
def draw(vertexes, edges): Xs = [] Ys = [] sug = _build_sugiyama_layout(vertexes, edges) for vertex in sug.g.sV: Xs.append(vertex.view.xy[0] - vertex.view.w / 2.0) Xs.append(vertex.view.xy[0] + vertex.view.w / 2.0) Ys.append(vertex.view.xy[1]) ...
Build a DAG and draw it in ASCII. Args: vertexes (list): list of graph vertexes. edges (list): list of graph edges.
juraj-google-style
def cardinal(self, to):
    """Return the number of dependencies of this package to the given node.

    Args:
        to (Package/Module): target node.

    Returns:
        int: total number of dependencies across all submodules.
    """
    total = 0
    for submodule in self.submodules:
        total += submodule.cardinal(to)
    return total
Return the number of dependencies of this package to the given node. Args: to (Package/Module): target node. Returns: int: number of dependencies.
codesearchnet
def _ReadCharacterDataTypeDefinition(self, definitions_registry, definition_values, definition_name, is_member=False): return self._ReadFixedSizeDataTypeDefinition(definitions_registry, definition_values, data_types.CharacterDefinition, definition_name, self._SUPPORTED_ATTRIBUTES_FIXED_SIZE_DATA_TYPE, is_member=is_...
Reads a character data type definition. Args: definitions_registry (DataTypeDefinitionsRegistry): data type definitions registry. definition_values (dict[str, object]): definition values. definition_name (str): name of the definition. is_member (Optional[bool]): True if the data type definition is a member data type d...
codesearchnet
def __init__(self, unique_identifier=None):
    """Construct an ActivateRequestPayload object.

    Args:
        unique_identifier (UniqueIdentifier): The UUID of the managed
            cryptographic object to activate. Optional; defaults to None.
    """
    super(ActivateRequestPayload, self).__init__(
        tag=enums.Tags.REQUEST_PAYLOAD)
    self.unique_identifier = unique_identifier
    # Validate payload contents immediately on construction.
    self.validate()
Construct a ActivateRequestPayload object. Args: unique_identifier (UniqueIdentifier): The UUID of a managed cryptographic object.
juraj-google-style
def greater_equal(x, y):
    """Element-wise truth value of (x >= y).

    Args:
        x: Tensor or variable.
        y: Tensor or variable.

    Returns:
        A bool tensor.
    """
    result = math_ops.greater_equal(x, y)
    return result
Element-wise truth value of (x >= y). Args: x: Tensor or variable. y: Tensor or variable. Returns: A bool tensor.
github-repos
def save(obj, filename, protocol=4):
    """Serialize an object to disk using the pickle protocol.

    Args:
        obj: The object to serialize.
        filename: Path to the output file.
        protocol: Version of the pickle protocol (default 4).
    """
    with open(filename, 'wb') as out_file:
        pickle.dump(obj, out_file, protocol=protocol)
Serialize an object to disk using pickle protocol. Args: obj: The object to serialize. filename: Path to the output file. protocol: Version of the pickle protocol.
codesearchnet
def xresnet50_2(pretrained=False, **kwargs):
    """Constructs a XResNet-50 model.

    Args:
        pretrained (bool): If True, loads weights pre-trained on ImageNet.
    """
    model = XResNet(Bottleneck, [3, 4, 6, 3], **kwargs)
    if not pretrained:
        return model
    model.load_state_dict(model_zoo.load_url(model_urls['xresnet50']))
    return model
Constructs a XResNet-50 model. Args: pretrained (bool): If True, returns a model pre-trained on ImageNet
juraj-google-style
def CheckHashes(hash_ids):
    """Checks which of the given hashes are present in the file store.

    Args:
        hash_ids: A list of SHA256HashID objects.

    Returns:
        A dict keyed by SHA256HashID whose value is True when the hash is
        present and False otherwise.
    """
    references = data_store.REL_DB.ReadHashBlobReferences(hash_ids)
    result = {}
    for hash_id, blob_refs in references.items():
        # An empty reference list means the hash is unknown to the store.
        result[hash_id] = bool(blob_refs)
    return result
Checks if files with given hashes are present in the file store. Args: hash_ids: A list of SHA256HashID objects. Returns: A dict where SHA256HashID objects are keys. Corresponding values are False if the hash id is not present, or True if it is present.
juraj-google-style
def is_expired(self):
    """Determine if the confirmation has expired.

    Returns:
        bool: ``True`` if more than one day has passed since creation,
        ``False`` otherwise.
    """
    deadline = self.created_at + datetime.timedelta(days=1)
    return timezone.now() > deadline
Determine if the confirmation has expired. Returns: bool: ``True`` if the confirmation has expired and ``False`` otherwise.
codesearchnet
def revnet_step(name, x, hparams, reverse=True): with tf.variable_scope(name, reuse=tf.AUTO_REUSE): if hparams.coupling == "additive": coupling_layer = functools.partial( additive_coupling, name="additive", reverse=reverse, mid_channels=hparams.coupling_width, activation=hpa...
One step of glow generative flow. Actnorm + invertible 1X1 conv + affine_coupling. Args: name: used for variable scope. x: input hparams: coupling_width is the only hparam that is being used in this function. reverse: forward or reverse pass. Returns: z: Output of one step of reversible flow.
juraj-google-style
def from_json(cls, json):
    """Creates an instance of the InputReader for the given shard's state.

    Args:
        json: The InputReader state as a dict-like object.

    Returns:
        An instance of the InputReader configured using the given JSON
        parameters.
    """
    # NOTE(review): Python 2 only (dict.iteritems). Keep only the keys this
    # reader recognizes, coercing keys to plain str for use as kwargs.
    params = dict(((str(k), v) for (k, v) in json.iteritems() if (k in cls._PARAMS)))
    # The offset parameter travels base64-encoded; decode it back to bytes.
    if (cls._OFFSET_PARAM in params):
        params[cls._OFFSET_PARAM] = base64.b64decode(params[cls._OFFSET_PARAM])
    return cls(**params)
Creates an instance of the InputReader for the given input shard's state. Args: json: The InputReader state as a dict-like object. Returns: An instance of the InputReader configured using the given JSON parameters.
codesearchnet
def write_build_info(filename, key_value_list): build_info = {} if cuda_config: build_info.update(cuda_config.config) if tensorrt_config: build_info.update(tensorrt_config.config) for arg in key_value_list: key, value = arg.split('=') if value.lower() == 'true': ...
Writes a Python file that describes the build. Args: filename: filename to write to. key_value_list: A list of "key=value" strings that will be added to the module's "build_info" dictionary as additional entries.
github-repos
def _send_socket_request(self, xml_request): def to_variant(number): buff = [] while number: byte = number % 128 number = number if number > 0: byte |= 0x80 buff.append(chr(byte)) re...
Send a request via protobuf. Args: xml_request -- A fully formed xml request string for the CPS. Returns: The raw xml response string.
juraj-google-style
def checkTUN(self):
    """Read any pending data from the TUN adapter.

    Returns:
        packet: Data read from the TUN adapter (up to one MTU worth).
    """
    tun = self._TUN._tun
    return tun.read(tun.mtu)
Checks the TUN adapter for data and returns any that is found. Returns: packet: Data read from the TUN adapter
codesearchnet
def with_contextual_override(func: Callable[..., Any]) -> Callable[..., Any]:
    """Wrap a user function with access to the current contextual override.

    The override active at wrapping time is captured, so the returned
    callable may safely be invoked from another thread.

    Args:
        func: The user function to be wrapped.

    Returns:
        A wrapper that reinstates the captured contextual override before
        calling `func`.
    """
    # Entering the context manager once captures the current override.
    with contextual_override() as current_context:
        pass

    def _wrapper(*args, **kwargs) -> Any:
        with contextual_override(**current_context):
            return func(*args, **kwargs)
    return _wrapper
Wraps a user function with the access to the current contextual override. The wrapped function can be called from another thread. Args: func: The user function to be wrapped. Returns: A wrapper function that have the access to the current contextual override, which can be called from another thread.
github-repos
def __init__(self, auth, api='/logs/search', **kwargs): self.api = api self.log = auth.log try: self.url = '%s%s' % (auth.get_url(), self.api) except AttributeError: self.url = 'https: try: self.auth = auth.get_auth() except A...
Search the logs. Args: auth (Client): Authentication object api (str): Api endpath
juraj-google-style
def read(cls, data): if isinstance(data, OrderedDict): return cls(data) elif isinstance(data, basestring)\ and data.startswith(("http: return cls(request(data)) elif isinstance(data, basestring): try: json_dict = json.load...
Reads data from URL or OrderedDict. Args: data: can be a URL pointing to a JSONstat file, a JSON string or an OrderedDict. Returns: An object of class Collection populated with data.
juraj-google-style
def netmiko_send_config( task: Task, config_commands: Optional[List[str]] = None, config_file: Optional[str] = None, **kwargs: Any ) -> Result: net_connect = task.host.get_connection("netmiko", task.nornir.config) net_connect.enable() if config_commands: result = net_connect.sen...
Execute Netmiko send_config_set method (or send_config_from_file) Arguments: config_commands: Commands to configure on the remote network device. config_file: File to read configuration commands from. kwargs: Additional arguments to pass to method. Returns: Result object with the following attributes set: * result (`...
juraj-google-style
def __type_to_tag(self, type_: Type) -> str: if type_ in scalar_type_to_tag: return scalar_type_to_tag[type_] if is_generic_list(type_): return 'tag:yaml.org,2002:seq' if is_generic_dict(type_): return 'tag:yaml.org,2002:map' if type_ in se...
Convert a type to the corresponding YAML tag. Args: type_: The type to convert Returns: A string containing the YAML tag.
juraj-google-style
def nhapDaiHan(self, cucSo, gioiTinh):
    """Assign the dai han (major period) start age to each of the 12 houses.

    Args:
        cucSo (TYPE): cuc number used as the base of the period.
        gioiTinh (TYPE): gender, which affects the house distance.

    Returns:
        TYPE: self, to allow chaining.
    """
    for house in self.thapNhiCung:
        distance = khoangCachCung(house.cungSo, self.cungMenh, gioiTinh)
        house.daiHan(cucSo + distance * 10)
    return self
Nhap dai han Args: cucSo (TYPE): Description gioiTinh (TYPE): Description Returns: TYPE: Description
codesearchnet
def setup(logdir='log'): logger = logging.getLogger() logger.setLevel(logging.DEBUG) logdir = os.path.normpath(logdir) if (not os.path.exists(logdir)): os.makedirs(logdir) t = datetime.datetime.now() logfile = '{year:04d}{mon:02d}{day:02d}-{hour:02d}{min:02d}{sec:02d}.log'.format(year=t....
Set up dual logging to console and to logfile. When this function is called, it first creates the given directory. It then creates a logfile and passes all log messages to come to it. The logfile name encodes the date and time when it was created, for example "20181115-153559.txt". All messages with a log level of at ...
codesearchnet
def remove_sonos_playlist(self, sonos_playlist):
    """Remove a Sonos playlist.

    Args:
        sonos_playlist (DidlPlaylistContainer): Sonos playlist to remove,
            or its item_id (str).

    Returns:
        bool: True if successful, False otherwise.

    Raises:
        SoCoUPnPException: If sonos_playlist does not point to a valid
            object.
    """
    # Accept either a playlist object (use its item_id) or a raw id string.
    try:
        object_id = sonos_playlist.item_id
    except AttributeError:
        object_id = sonos_playlist
    return self.contentDirectory.DestroyObject([('ObjectID', object_id)])
Remove a Sonos playlist. Args: sonos_playlist (DidlPlaylistContainer): Sonos playlist to remove or the item_id (str). Returns: bool: True if successful, False otherwise Raises: SoCoUPnPException: If sonos_playlist does not point to a valid object.
juraj-google-style
def _ParseValueData(self, knowledge_base, value_data): if not isinstance(value_data, py2to3.UNICODE_TYPE): raise errors.PreProcessFail( 'Unsupported Windows Registry value type: {0:s} for ' 'artifact: {1:s}.'.format( type(value_data), self.ARTIFACT_DEFINITION_NAME)) ...
Parses Windows Registry value data for a preprocessing attribute. Args: knowledge_base (KnowledgeBase): to fill with preprocessing information. value_data (object): Windows Registry value data. Raises: errors.PreProcessFail: if the preprocessing fails.
juraj-google-style
def add_backend_policy(self, json_data): env = boto3.session.Session(profile_name=self.env, region_name=self.region) elbclient = env.client('elb') for job in json.loads(json_data)['job']: for listener in job['listeners']: instance_port = listener['i...
Attaches backend server policies to an ELB Args: json_data (json): return data from ELB upsert
juraj-google-style
def click_exists(self, timeout=0):
    """Click the element if it appears within the timeout.

    Args:
        timeout (float): seconds to wait for the element.

    Returns:
        bool: True if the element was found and clicked, False otherwise.
    """
    element = self.get(timeout=timeout, raise_error=False)
    if element is not None:
        element.click()
        return True
    return False
Wait element and perform click Args: timeout (float): timeout for wait Returns: bool: if successfully clicked
juraj-google-style
def countriesdata(cls, use_live=True): if cls._countriesdata is None: countries = None if use_live: try: countries = hxl.data(cls._ochaurl) except IOError: logger.exception('Download from OCHA feed ...
Read countries data from OCHA countries feed (falling back to file) Args: use_live (bool): Try to get use latest data from web rather than file in package. Defaults to True. Returns: List[Dict[Dict]]: Countries dictionaries
juraj-google-style
def as_saver_def(self):
    """Generates a `SaverDef` representation of this saver.

    Returns:
      A `SaverDef` proto.
    """
    return self.saver_def
Generates a `SaverDef` representation of this saver. Returns: A `SaverDef` proto.
github-repos
def is_mobile_number_portable_region(region_code):
    """Returns True if the supplied region supports mobile number portability.

    Returns False for invalid, unknown, or unsupported regions.

    Arguments:
    region_code -- the region for which we want to know whether it supports
          mobile number portability or not.
    """
    metadata = PhoneMetadata.metadata_for_region(region_code, None)
    if metadata is not None:
        return metadata.mobile_number_portable_region
    return False
Returns true if the supplied region supports mobile number portability. Returns false for invalid, unknown or regions that don't support mobile number portability. Arguments: region_code -- the region for which we want to know whether it supports mobile number portability or not.
juraj-google-style
def update_with_zero_body(self, uri=None, timeout=-1, custom_headers=None):
    """Makes a PUT request with an empty body to update a resource.

    Args:
        uri: Optional URI; defaults to this resource's own URI.
        timeout: Timeout in seconds. Waits for task completion by default.
            The timeout does not abort the operation in OneView; it just
            stops waiting for its completion.
        custom_headers: Optional headers to add to the request.

    Returns:
        The updated resource data.
    """
    target_uri = uri if uri else self.data['uri']
    logger.debug('Update with zero length body (uri = %s)' % target_uri)
    return self._helper.do_put(target_uri, None, timeout, custom_headers)
Makes a PUT request to update a resource when no request body is required. Args: uri: Allows to use a different URI other than resource URI timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation in OneView; it just stops waiting for its completion. custom_headers: Al...
juraj-google-style
def make_state_space_model(self, num_timesteps, param_vals=None, initial_state_prior=None, initial_step=0):
    """Instantiate this model as a Distribution over `num_timesteps` steps.

    Args:
        num_timesteps: Python `int` number of timesteps to model.
        param_vals: list of `Tensor` parameter values ordered like
            `self.parameters`, or a dict mapping parameter names to values.
        initial_state_prior: optional `Distribution` prior over the latent
            state at the first timestep.
        initial_step: Python `int` index of the first modeled timestep.

    Returns:
        The state space model distribution built by the subclass hook.
    """
    param_map = self._canonicalize_param_vals_as_map(param_vals)
    return self._make_state_space_model(
        num_timesteps=num_timesteps,
        param_map=param_map,
        initial_state_prior=initial_state_prior,
        initial_step=initial_step,
    )
Instantiate this model as a Distribution over specified `num_timesteps`. Args: num_timesteps: Python `int` number of timesteps to model. param_vals: a list of `Tensor` parameter values in order corresponding to `self.parameters`, or a dict mapping from parameter names to values. initial_state_prior: an optional `Distr...
codesearchnet
class AlphaDropout(Layer): def __init__(self, rate, noise_shape=None, seed=None, **kwargs): super().__init__(**kwargs) if not 0 <= rate <= 1: raise ValueError(f'Invalid value received for argument `rate`. Expected a float value between 0 and 1. Received: rate={rate}') self.rate ...
Applies Alpha Dropout to the input. Alpha Dropout is a `Dropout` that keeps mean and variance of inputs to their original values, in order to ensure the self-normalizing property even after this dropout. Alpha Dropout fits well to Scaled Exponential Linear Units (SELU) by randomly setting activations to the negative s...
github-repos
def __init__(self, default: typing.Optional[float]=MISSING_VALUE, min_value: typing.Optional[float]=None, max_value: typing.Optional[float]=None, is_noneable: bool=False, frozen: bool=False):
    """Constructor.

    Args:
        default: (Optional) default value for this spec.
        min_value: (Optional) minimum acceptable value.
        max_value: (Optional) maximum acceptable value.
        is_noneable: If True, None is acceptable.
        frozen: If True, values other than the default are not acceptable.
    """
    # Delegate to the base numeric spec with `float` as the value type.
    super().__init__(float, default, min_value, max_value, is_noneable, frozen)
Constructor. Args: default: (Optional) default value for this spec. min_value: (Optional) minimum value of acceptable values. max_value: (Optional) maximum value of acceptable values. is_noneable: If True, None is acceptable. frozen: If True, values other than the default value is not accceptable.
github-repos
def _construct_forward_backward(self, num_doutputs): trainable_outputs = [output for output in self._func_graph.outputs[:num_doutputs] if backprop_util.IsTrainable(output)] signature = [] for t in trainable_outputs: signature.append(tensor_lib.TensorSpec(*default_gradient.shape_and_dtype(t))) d...
Constructs a pair of forward and backward functions. Args: num_doutputs: The constructed backprop function will take output gradients for the first `num_doutputs` outputs of the forward function. Defaults to the number of outputs for the inference function, but when higher-order gradients are computed this will increa...
github-repos
def _FormatValue(self, value, level=0): def FormatDictItem(key_value): 'Formats single dictionary item.' (key, value) = key_value return ((self._FormatValue(key, (level + 1)) + ': ') + self._FormatValue(value, (level + 1))) def LimitedEnumerate(items, formatter, level=0): 'Retu...
Pretty-prints an object for a logger. This function is very similar to the standard pprint. The main difference is that it enforces limits to make sure we never produce an extremely long string or take too much time. Args: value: Python object to print. level: current recursion level. Returns: Formatted string.
codesearchnet
def catch(func, *args, **kwargs):
    """Run `func(*args, **kwargs)` and return any exception it raises.

    Arguments:
        func: the function to run.
        *args: positional arguments to pass into the function.
        **kwargs: keyword arguments to pass into the function.

    Returns:
        The exception instance raised by the function, or None if it
        completed without raising.
    """
    try:
        func(*args, **kwargs)
    except Exception as exc:
        return exc
    return None
Call the supplied function with the supplied arguments, catching and returning any exception that it throws. Arguments: func: the function to run. *args: positional arguments to pass into the function. **kwargs: keyword arguments to pass into the function. Returns: If the function throws an exception, return the excep...
codesearchnet
def cancel(self, invoice_id, **kwargs):
    """Cancel an unpaid Invoice with the given ID via the API.

    May only be called on an invoice that is not in the paid state.

    Args:
        invoice_id: Id of the invoice to cancel.

    Returns:
        The invoice entity, similar to the create/update API response,
        with the status attribute set to 'cancelled'.
    """
    url = '%s/%s/cancel' % (self.base_url, invoice_id)
    return self.post_url(url, {}, **kwargs)
Cancel an unpaid Invoice with given ID via API It can only be called on an invoice that is not in the paid state. Args: invoice_id : Id for cancel the invoice Returns: The response for the API will be the invoice entity, similar to create/update API response, with status attribute's value as cancelled
juraj-google-style
def _get_example(filepath: str, filename: str, tag: Tag, sdk: int) -> Example: context_line = tag.context_line if tag.context_line <= tag.line_start else tag.context_line - (tag.line_finish - tag.line_start) return Example(sdk=SdkEnum(sdk), tag=tag, filepath=filepath, status=STATUS_UNSPECIFIED, type=_get_object...
Return an Example by filepath and filename. Args: filepath: path of the example's file. filename: name of the example's file. tag: tag of the example. Returns: Parsed Example object.
github-repos
def read(self, domain, type_name, search_command, body=None):
    """Read an entry in the ThreatConnect Data Store.

    Args:
        domain (string): One of 'local', 'organization', or 'system'.
        type_name (string): Free-form index type name; the ThreatConnect
            API uses this resource verbatim.
        search_command (string): Search command to pass to ES.
        body (str): Optional JSON body.

    Returns:
        The response of the underlying GET request.
    """
    http_method = 'GET'
    return self._request(domain, type_name, search_command, http_method, body)
Read entry in ThreatConnect Data Store Args: domain (string): One of 'local', 'organization', or 'system'. type_name (string): This is a free form index type name. The ThreatConnect API will use this resource verbatim. search_command (string): Search command to pass to ES. body (str): JSON body
juraj-google-style
def _build(self, inputs, prev_state): input_size = inputs.get_shape()[1] weight_shape = (input_size, self._hidden_size) u_shape = (self._hidden_size, self._hidden_size) bias_shape = (self._hidden_size,) def _get_variable(name, shape): return tf.get_variable(name, shape, dtype=inputs.dtype, ...
Connects the highway core module into the graph. Args: inputs: Tensor of size `[batch_size, input_size]`. prev_state: Tensor of size `[batch_size, hidden_size]`. Returns: A tuple (output, next_state) where `output` is a Tensor of size `[batch_size, hidden_size]` and `next_state` is a Tensor of size `[batch_size, hidd...
codesearchnet
def get_block_details(self, block_ids): if (not hasattr(block_ids, '__iter__')): block_ids = [block_ids] for _id in block_ids: block_key = self._db.get_block(_id)[0] block_data = self._db.get_all_field_value(block_key) for key in block_data: for char in ['[', '{']: ...
Get details of scheduling or processing block Args: block_ids (list): List of block IDs
codesearchnet
def weak_scaling(timing_stats, scaling_var, data_points): timing_data = dict() proc_counts = [] bench_means = [] bench_mins = [] bench_maxs = [] model_means = [] model_mins = [] model_maxs = [] for point in data_points: size = point[0] proc = point[1] try: ...
Generate data for plotting weak scaling. The data points keep a constant amount of work per processor for each data point. Args: timing_stats: the result of the generate_timing_stats function scaling_var: the variable to select from the timing_stats dictionary (can be provided in configurations via the 'scaling_var' ...
codesearchnet
def set_lock_config(self, device_label, volume=None, voice_level=None, auto_lock_enabled=None): response = None data = {} if volume: data['volume'] = volume if voice_level: data['voiceLevel'] = voice_level if auto_lock_enab...
Set lock configuration Args: device_label (str): device label of lock volume (str): 'SILENCE', 'LOW' or 'HIGH' voice_level (str): 'ESSENTIAL' or 'NORMAL' auto_lock_enabled (boolean): auto lock enabled
juraj-google-style
def download_image(self, device_label, image_id, file_name): response = None try: response = requests.get( urls.download_image(self._giid, device_label, image_id), headers={ 'Cookie': 'vid={}'.format(self._vid)}, st...
Download image taken by a smartcam Args: device_label (str): device label of camera image_id (str): image id from image series file_name (str): path to file
juraj-google-style
def get_eligible_features(examples, num_mutants): features_dict = ( get_numeric_features_to_observed_range( examples)) features_dict.update( get_categorical_features_to_sampling( examples, num_mutants)) features_list = [] for k, v in sorted(features_dict.items()): ...
Returns a list of JSON objects for each feature in the examples. This list is used to drive partial dependence plots in the plugin. Args: examples: Examples to examine to determine the eligible features. num_mutants: The number of mutations to make over each feature. Returns: A list with a JSON object for each featu...
juraj-google-style
def _parse_compound_info(self, line): for (k, regexes) in six.iteritems(self.compound_regex): for reg in regexes: if self.compound_info[k]: continue m = re.search(reg, line, re.IGNORECASE) if m: self.compound_info[k] = m.group(1).strip() ...
Parse and extract all compound data by looping through the dictionary of compound_info regexs updates self.compound_info Args: line (str): line of the msp file
codesearchnet
def _make_intermediates_match(branch_graphs, branch_optionals): new_branch_optionals = [] intermediates_size = max((len(o) for o in branch_optionals)) for i, branch_graph in enumerate(branch_graphs): other_optionals = _create_none_optionals(branch_graph, intermediates_size - len(branch_optionals[i])...
Returns new optionals lists that have matching signatures. This is done by mirroring each list in the other using none optionals. There is no merging of like optionals. Args: branch_graphs: `list` of `FuncGraph`. branch_optionals: `list` of `list`s of optional `Tensor`s from other branch_graphs Returns: A `list` of ...
github-repos
def _check_for_definition(iface, cls, tag, defines): attributes = (attr for attr in iface.__abstractmethods__ if hasattr(getattr(iface, attr), tag)) for attribute in attributes: for node in cls.__mro__: if (hasattr(node, attribute) and defines(getattr(node, attribute))): retu...
Check for a valid definition of a value. Args: iface (Iface): An Iface specification. cls (type): Some type to check for a definition. tag (str): The name of the tag attribute used to mark the abstract methods. defines (callable): A callable that accepts an attribute and returns True if the attribute is a valid defini...
codesearchnet
def inject_argument_info_in_traceback(fn, object_name=None): if backend.backend() == 'tensorflow': from tensorflow import errors as tf_errors else: tf_errors = None @wraps(fn) def error_handler(*args, **kwargs): if not is_traceback_filtering_enabled(): return fn(*arg...
Add information about call argument values to an error message. Arguments: fn: Function to wrap. Exceptions raised by the this function will be re-raised with additional information added to the error message, displaying the values of the different arguments that the function was called with. object_name: String, disp...
github-repos
def forward(self, hidden_states: torch.Tensor, position_embeddings: Optional[torch.Tensor]=None, reference_points=None, spatial_shapes=None, spatial_shapes_list=None, encoder_hidden_states: Optional[torch.Tensor]=None, encoder_attention_mask: Optional[torch.Tensor]=None, output_attentions: Optional[bool]=False) -> tupl...
Args: hidden_states (`torch.FloatTensor`): Input to the layer of shape `(seq_len, batch, embed_dim)`. position_embeddings (`torch.FloatTensor`, *optional*): Position embeddings that are added to the queries and keys in the self-attention layer. reference_points (`torch.FloatTensor`, *optional*): Reference points. spati...
github-repos
def pseudo_with_symbol(self, symbol, allow_multi=False): pseudos = self.select_symbols(symbol, ret_list=True) if ((not pseudos) or ((len(pseudos) > 1) and (not allow_multi))): raise ValueError(('Found %d occurrences of symbol %s' % (len(pseudos), symbol))) if (not allow_multi): return pseudo...
Return the pseudo with the given chemical symbol. Args: symbols: String with the chemical symbol of the element allow_multi: By default, the method raises ValueError if multiple occurrences are found. Use allow_multi to prevent this. Raises: ValueError if symbol is not found or multiple occurences are present and not...
codesearchnet
def __init__(self, formatter, object_representer):
    """Initialize formatter and object representer.

    Args:
        formatter: Callable object/function that formats the object loaded
            from the input file. Signature: iterable = formatter(iterable).
        object_representer: An ObjectRepresenter instance.
    """
    super().__init__(formatter)
    self.object_representer = object_representer
    logger.debug('obj loader set')
Initialize formatter and object representer. Args: formatter: Callable object/function that will format object loaded from in file. Formatter signature: iterable = formatter(iterable) object_representer: An ObjectRepresenter instance.
juraj-google-style
def stop_server(self, grace=1.0): self._server_lock.acquire() try: if not self._server_started: raise ValueError('Server has not started running') if self._stop_requested: raise ValueError('Server has already stopped') self._stop_requested = True return se...
Request server stopping. Once stopped, server cannot be stopped or started again. This method is non-blocking. Call `wait()` on the returned event to block until the server has completely stopped. Args: grace: Grace period in seconds to be used when calling `server.stop()`. Raises: ValueError: If server stop has alr...
github-repos
def _AddEdge(self, start_node, end_node): self.graph[start_node].outgoing.append(end_node) if (end_node in self.graph): self.graph[end_node].incoming.append(start_node)
Add a directed edge to the graph. Add the end to the list of outgoing nodes of the start and the start to the list of incoming nodes of the end node. Args: start_node: name of the start node end_node: name of the end node
codesearchnet
def put(self, key, value): if value is None: self.delete(key) else: self._collection(key)[key] = value
Stores the object `value` named by `key`. Stores the object in the collection corresponding to ``key.path``. Args: key: Key naming `value` value: the object to store.
juraj-google-style
def aggregate(self): (_, indices, inverse) = np.unique(self.record.sample, axis=0, return_index=True, return_inverse=True) order = np.argsort(indices) indices = indices[order] record = self.record[indices] record.num_occurrences = 0 for (old_idx, new_idx) in enumerate(inverse): new_idx =...
Create a new SampleSet with repeated samples aggregated. Returns: :obj:`.SampleSet` Note: :attr:`.SampleSet.record.num_occurrences` are accumulated but no other fields are.
codesearchnet
def get_task_scfcycles(self, nids=None, wslice=None, task_class=None, exclude_ok_tasks=False): select_status = [self.S_RUN] if exclude_ok_tasks else [self.S_RUN, self.S_OK] tasks_cycles = [] for task in self.select_tasks(nids=nids, wslice=wslice): if task.statu...
Return list of (taks, scfcycle) tuples for all the tasks in the flow with a SCF algorithm e.g. electronic GS-SCF iteration, DFPT-SCF iterations etc. Args: nids: List of node identifiers. wslice: Slice object used to select works. task_class: String or class used to select tasks. Ignored if None. exclude_ok_tasks: True...
juraj-google-style
def get_country_by_name(self, country_name: str) -> typing.Optional['Country']: VALID_STR.validate(country_name, 'get_country_by_name', exc=ValueError) if country_name not in self._countries_by_name.keys(): for country in self.countries: if country.country_name == c...
Gets a country from its name Args: country_name: country name Returns: Country
juraj-google-style
def sampling_query(sql, fields=None, count=5, sampling=None): if (sampling is None): sampling = Sampling.default(count=count, fields=fields) return sampling(sql)
Returns a sampling query for the SQL object. Args: sql: the SQL object to sample fields: an optional list of field names to retrieve. count: an optional count of rows to retrieve which is used if a specific sampling is not specified. sampling: an optional sampling strategy to apply to the table. Returns: A SQL query s...
codesearchnet
async def update( self, service_id: str, version: str, *, image: str = None, rollback: bool = False ) -> bool: if image is None and rollback is False: raise ValueError("You need to specify an image.") inspect_service = await self....
Update a service. If rollback is True image will be ignored. Args: service_id: ID or name of the service. version: Version of the service that you want to update. rollback: Rollback the service to the previous service spec. Returns: True if successful.
juraj-google-style
def issue_closed(issue_key, server=None, username=None, password=None): if (not issue_key): return None jira_ = _get_jira(server=server, username=username, password=password) try: ticket = jira_.issue(issue_key) except jira.exceptions.JIRAError: return None return (ticket.fie...
Check if the issue is closed. issue_key The JIRA iD of the ticket to close. Returns: - ``True``: the ticket exists and it is closed. - ``False``: the ticket exists and it has not been closed. - ``None``: the ticket does not exist. CLI Example: .. code-block:: bash salt '*' jira.issue_closed NE-123
codesearchnet
def fn_with_code_in_docstring(): return True
This has code in the docstring. Example: x = fn_with_code_in_docstring() indentation_matters = True Returns: True.
github-repos
def update_scores(self, scores: torch.FloatTensor, g_values: torch.FloatTensor) -> torch.FloatTensor: _, _, depth = g_values.shape probs = torch.softmax(scores, dim=1) for i in range(depth): g_values_at_depth = g_values[:, :, i] g_mass_at_depth = (g_values_at_depth * probs).sum(axis=1, keepd...
Updates scores using the g values. We assume that the scores are in the log space. Args: scores (`torch.FloatTensor`): Scores (batch_size, vocab_size). g_values (`torch.FloatTensor`): G values (batch_size, vocab_size, depth). Returns: Updated scores (batch_size, vocab_size).
github-repos
def num_samples(self, dataset_split): return {problem.DatasetSplit.TRAIN: 1000000, problem.DatasetSplit.EVAL: 10000, problem.DatasetSplit.TEST: 10000}[dataset_split]
Determine the dataset sized given a dataset_split. Args: dataset_split: A problem.DatasetSplit. Returns: The desired number of samples for this dataset_split.
codesearchnet
def delete(self, interface, vrid): vrrp_str = ('no vrrp %d' % vrid) return self.configure_interface(interface, vrrp_str)
Deletes a vrrp instance from an interface Note: This method will attempt to delete the vrrp from the node's operational config. If the vrrp does not exist on the interface then this method will not perform any changes but still return True Args: interface (string): The interface to configure. vrid (integer): The vrid...
codesearchnet
def assert_rank_in(x, ranks, data=None, summarize=None, message=None, name=None): with ops.name_scope(name, 'assert_rank_in', (x,) + tuple(ranks) + tuple(data or [])): if not isinstance(x, sparse_tensor.SparseTensor): x = ops.convert_to_tensor(x, name='x') ranks = tuple([ops.convert_to_t...
Assert `x` has rank in `ranks`. Example of adding a dependency to an operation: ```python with tf.control_dependencies([tf.compat.v1.assert_rank_in(x, (2, 4))]): output = tf.reduce_sum(x) ``` Args: x: Numeric `Tensor`. ranks: Iterable of scalar `Tensor` objects. data: The tensors to print out if the condition is ...
github-repos
def select_serial_number_row(self, serial_number): sheet = self.table col = self.db_sheet_cols.id rows = (sheet.loc[(:, col)] == serial_number) return sheet.loc[(rows, :)]
Select row for identification number serial_number Args: serial_number: serial number Returns: pandas.DataFrame
codesearchnet
def add(self, arg, options=None): fut = tasklets.Future(('%s.add(%s, %s)' % (self, arg, options))) todo = self._queues.get(options) if (todo is None): utils.logging_debug('AutoBatcher(%s): creating new queue for %r', self._todo_tasklet.__name__, options) if (not self._queues): ev...
Adds an arg and gets back a future. Args: arg: one argument for _todo_tasklet. options: rpc options. Return: An instance of future, representing the result of running _todo_tasklet without batching.
codesearchnet
def check_config_attributes_being_used(config_class): signature = dict(inspect.signature(config_class.__init__).parameters) parameter_names = [x for x in list(signature.keys()) if x not in ['self', 'kwargs']] parameter_defaults = [signature[param].default for param in parameter_names] reversed_attribute...
Check the arguments in `__init__` of `config_class` are used in the modeling files in the same directory Args: config_class (`type`): The configuration class for which the arguments in its `__init__` will be checked.
github-repos
def create(cls, session, attributes=None, relationships=None): resource_type = cls._resource_type() resource_path = cls._resource_path() url = session._build_url(resource_path) json = build_request_body(resource_type, None, attributes=attributes, relationships=relationships) process = cls._mk_one(se...
Create a resource of the resource. This should only be called from sub-classes Args: session(Session): The session to create the resource in. attributes(dict): Any attributes that are valid for the given resource type. relationships(dict): Any relationships that are valid for the given resource type. Returns: Re...
codesearchnet
def start(self, name: str, increment_count: bool = True) -> None: if not self._timing: return now = get_now_utc_pendulum() if self._stack: last = self._stack[-1] self._totaldurations[last] += now - self._starttimes[last] if...
Start a named timer. Args: name: name of the timer increment_count: increment the start count for this timer
juraj-google-style
def get(cls, keyval, key='id', user_id=None): if (keyval is None): return None if ((key in cls.__table__.columns) and cls.__table__.columns[key].primary_key): return cls.query.get(keyval) else: result = cls.query.filter((getattr(cls, key) == keyval)) return result.first()
Fetches a single instance which has value `keyval` for the attribute `key`. Args: keyval: The value of the attribute. key (str, optional): The attribute to search by. By default, it is 'id'. Returns: A model instance if found. Else None. Examples: >>> User.get(35) user35@i.com >>> User.get('user35@i.com', key=...
codesearchnet
def safe_rt(resource_type, lower=False): if (resource_type is not None): resource_type = resource_type.replace(' ', '_') if lower: resource_type = resource_type.lower() return resource_type
Format the Resource Type. Takes Custom Indicator types with a space character and return a *safe* string. (e.g. *User Agent* is converted to User_Agent or user_agent.) Args: resource_type (string): The resource type to format. lower (boolean): Return type in all lower case Returns: (string): The formatted resource ...
codesearchnet
def start(self, hostname=None, port=None, templates_path=None): self.hostname = (hostname if hostname else 'localhost') if port: self.port = port elif (not self.port): self.port = unused_port(self.hostname) if templates_path: self.loaders.insert(0, jinja2.FileSystemLoader(templat...
Starts the web interface. Args: hostname (str, optional): host name to listen from. (Default value = None) port (int, optional): port to listen from. (Default value = None) templates_path (str, optional): path to look for templates. (Default value = None)
codesearchnet
def makefile(self): return self.env.get_template('Makefile.j2').render(metadata=self.metadata, package=self.package)
Generate the documentation Makefile. Returns: (str): the contents of the `Makefile`.
codesearchnet
def get_what_follows_raw(s: str, prefix: str, onlyatstart: bool=True, stripwhitespace: bool=True) -> Tuple[(bool, str)]: prefixstart = s.find(prefix) if (((prefixstart == 0) and onlyatstart) or ((prefixstart != (- 1)) and (not onlyatstart))): resultstart = (prefixstart + len(prefix)) result = s[...
Find the part of ``s`` that is after ``prefix``. Args: s: string to analyse prefix: prefix to find onlyatstart: only accept the prefix if it is right at the start of ``s`` stripwhitespace: remove whitespace from the result Returns: tuple: ``(found, result)``
codesearchnet
def prepend(self, key, value, expire=0, noreply=None): if noreply is None: noreply = self.default_noreply return self._store_cmd(b'prepend', {key: value}, expire, noreply)[key]
The memcached "prepend" command. Args: key: str, see class docs for details. value: str, see class docs for details. expire: optional int, number of seconds until the item is expired from the cache, or zero for no expiry (the default). noreply: optional bool, True to not wait for the reply (defaults to self.default_no...
juraj-google-style
def tanh(x): return ops.tanh(x)
Hyperbolic tangent activation function. It is defined as: `tanh(x) = sinh(x) / cosh(x)`, i.e. `tanh(x) = ((exp(x) - exp(-x)) / (exp(x) + exp(-x)))`. Args: x: Input tensor.
github-repos
def disconnect_sync(self, connection_handle): self.bable.disconnect(connection_handle=connection_handle, sync=True)
Synchronously disconnect from whoever has connected to us Args: connection_handle (int): The handle of the connection we wish to disconnect.
juraj-google-style
def predict_on_batch(self, x): self._check_call_args('predict_on_batch') if self._distribution_strategy and distribute_lib.in_cross_replica_context(): raise NotImplementedError('`predict_on_batch` is not supported for models distributed with tf.distribute.Strategy.') inputs, _, _ = self._standardize...
Returns predictions for a single batch of samples. Args: x: Input data. It could be: - A Numpy array (or array-like), or a list of arrays (in case the model has multiple inputs). - A TensorFlow tensor, or a list of tensors (in case the model has multiple inputs). - A `tf.data` dataset. Returns: Numpy array(s) of pred...
github-repos
def replace(self, pattern, replacement): for (i, line) in enumerate(self): if (pattern in line): self[i] = line.replace(pattern, replacement)
Replace all instances of a pattern with a replacement. Args: pattern (str): Pattern to replace replacement (str): Text to insert
codesearchnet
def _insert_layers(self, layers, relevant_nodes=None): layers = nest.flatten(layers) tf_utils.assert_no_legacy_layers(layers) node_to_depth = {} for depth, nodes in self._nodes_by_depth.items(): node_to_depth.update({node: depth for node in nodes}) if not relevant_nodes: relevant_nod...
Inserts Layers into the Network after Network creation. This is only valid for Keras Graph Networks. Layers added via this function will be included in the `call` computation and `get_config` of this Network. They will not be added to the Network's outputs. Args: layers: Arbitrary nested structure of Layers. Layers...
github-repos
def CheckAltTokens(filename, clean_lines, linenum, error): line = clean_lines.elided[linenum] if Match(r'^\s* return if line.find('') >= 0: return for match in _ALT_TOKEN_REPLACEMENT_PATTERN.finditer(line): error(filename, linenum, 'readability/alt_tokens', 2, ...
Check alternative keywords being used in boolean expressions. Args: filename: The name of the current file. clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. error: The function to call with any errors found.
juraj-google-style
def parse_args(self, arglist=None): args = self._parser.parse_args(args=arglist) sub_cmd = args.loam_sub_name if (sub_cmd is None): for (opt, sct) in self._opt_bare.items(): self._conf[sct][opt] = getattr(args, opt, None) else: for (opt, sct) in self._opt_cmds[sub_cmd].items(...
Parse arguments and update options accordingly. Args: arglist (list of str): list of arguments to parse. If set to None, ``sys.argv[1:]`` is used. Returns: :class:`Namespace`: the argument namespace returned by the :class:`argparse.ArgumentParser`.
codesearchnet
def convert_to_rgb(video: np.array, data_format: Optional[ChannelDimension]=None, input_data_format: Optional[Union[str, ChannelDimension]]=None) -> np.array: if not isinstance(video, np.ndarray): raise ValueError(f'Video has to be a numpy array to convert to RGB format, but found {type(video)}') if inp...
Convert video to RGB by blending the transparency layer if it's in RGBA format, otherwise simply returns it. Args: video (`np.array`): The video to convert. data_format (`ChannelDimension`, *optional*): The channel dimension format of the output video. If unset, will use the inferred format from the input. input_data_...
github-repos
def compose(*coros): coros = list(coros) @asyncio.coroutine def reducer(acc, coro): return (yield from coro(acc)) @asyncio.coroutine def wrapper(acc): return (yield from reduce(reducer, coros, initializer=acc, right=True)) return wrapper
Creates a coroutine function based on the composition of the passed coroutine functions. Each function consumes the yielded result of the coroutine that follows. Composing coroutine functions f(), g(), and h() would produce the result of f(g(h())). Arguments: *coros (coroutinefunction): variadic coroutine functions ...
codesearchnet