code
stringlengths
20
4.93k
docstring
stringlengths
33
1.27k
source
stringclasses
3 values
def _Matches(path, pattern_list): return any((fnmatch.fnmatchcase(path, pattern) for pattern in pattern_list))
Returns True if path matches any pattern found in pattern_list. Args: path: A dot separated path to a package, class, method or variable pattern_list: A list of wildcard patterns Returns: True if path matches any wildcard found in pattern_list.
codesearchnet
def readSchedules(self, tableset): self.setContext('readSchedules') try: req_table = binascii.hexlify(str(tableset).zfill(1)) req_str = (('01523102303037' + req_table) + '282903') self.request(False) req_crc = self.calc_crc16(req_str[2:].decode('hex')) req_str += req_crc ...
Serial call to read schedule tariffs buffer Args: tableset (int): :class:`~ekmmeters.ReadSchedules` buffer to return. Returns: bool: True on completion and ACK.
codesearchnet
def load_extra_data(cls, data):
    """Loads extra JSON configuration parameters from a data buffer.

    The data buffer must represent a JSON object.

    Args:
        cls: Class whose ``_extra_config`` dict is updated in place.
        data (str): The buffer to load the JSON data from.
    """
    try:
        cls._extra_config.update(json.loads(data))
    except ValueError as exception:
        # Use !s (str conversion): '{0:s}'.format(exc) raises TypeError
        # because BaseException does not accept the 's' format spec.
        # Message also fixed: the original read "Could convert to JSON."
        sys.stderr.write('Could not convert to JSON. {0!s}'.format(exception))
        # sys.exit instead of the site-injected exit() builtin.
        sys.exit(-1)
Loads extra JSON configuration parameters from a data buffer. The data buffer must represent a JSON object. Args: data: str, the buffer to load the JSON data from.
codesearchnet
def aggregate_single_gradient(grad_and_vars, use_mean, check_inf_nan): grads = [g for (g, _) in grad_and_vars] grad = tf.add_n(grads) if (use_mean and (len(grads) > 1)): grad = tf.multiply(grad, (1.0 / len(grads))) v = grad_and_vars[0][1] if check_inf_nan: has_nan_or_inf = tf.logical...
Calculate the average gradient for a shared variable across all towers. Note that this function provides a synchronization point across all towers. Args: grad_and_vars: A list or tuple of (gradient, variable) tuples. Each (gradient, variable) pair within the outer list represents the gradient of the variable calculat...
codesearchnet
def model_from_path(model_path, fuzziness=False): app_name = '.'.join(model_path.split('.')[:(- 1)]) model_name = model_path.split('.')[(- 1)] if (not app_name): return None module = importlib.import_module(app_name) try: model = getattr(module, model_name) except AttributeError:...
Find the model class for a given model path like 'project.app.model' Args: path (str): dot-delimited model path, like 'project.app.model' Returns: Django Model-based class
codesearchnet
def _CreateRouteTripsFolder(self, parent, route, style_id=None, schedule=None): if (not route.trips): return None trips = list(route.trips) trips.sort(key=(lambda x: x.trip_id)) trips_folder = self._CreateFolder(parent, 'Trips', visible=False) for trip in trips: if (self.date_filter ...
Create a KML Folder containing all the trips in the route. The folder contains a placemark for each of these trips. If there are no trips in the route, no folder is created and None is returned. Args: parent: The parent ElementTree.Element instance. route: The transitfeed.Route instance. style_id: A style id string f...
codesearchnet
def list_skus(access_token, subscription_id, location, publisher, offer): endpoint = ''.join([get_rm_endpoint(), '/subscriptions/', subscription_id, '/providers/Microsoft.Compute/', 'locations/', location, '/publishers/', publisher, '/artifacttypes/vmimage/offers/', offer, '/skus?api-version=', COMP_API]) retur...
List available VM image skus for a publisher offer. Args: access_token (str): A valid Azure authentication token. subscription_id (str): Azure subscription id. location (str): Azure data center location. E.g. westus. publisher (str): VM image publisher. E.g. MicrosoftWindowsServer. offer (str): VM image offer. E.g. Wi...
codesearchnet
def split(self, value, lengths, name=None):
    """Split the values of a `Tensor` into the TensorArray.

    Args:
        value: (N+1)-D. Tensor of type `dtype`. The Tensor to split.
        lengths: 1-D. int32 vector with the lengths to use when splitting
            `value` along its first dimension.
        name: A name for the operation (optional).

    Returns:
        A new TensorArray object (as produced by the backing implementation).
    """
    # Pure delegation to the implementation object this wrapper holds.
    backend = self._implementation
    return backend.split(value, lengths, name=name)
Split the values of a `Tensor` into the TensorArray. Args: value: (N+1)-D. Tensor of type `dtype`. The Tensor to split. lengths: 1-D. int32 vector with the lengths to use when splitting `value` along its first dimension. name: A name for the operation (optional). Returns: A new TensorArray object with flow that en...
github-repos
def JoinKeyPath(path_segments): path_segments = [ segment.split(definitions.KEY_PATH_SEPARATOR) for segment in path_segments] path_segments = [ element for sublist in path_segments for element in sublist] path_segments = filter(None, path_segments) key_path = definitions....
Joins the path segments into key path. Args: path_segments (list[str]): Windows Registry key path segments. Returns: str: key path.
juraj-google-style
def LoadConfig(configuration): parser = ConfigParser() configuration.log.debug('Attempting to parse configuration file: %s', configuration.config_file) parser.read(configuration.config_file) default = 'DEFAULT' default_source = FixValue(parser.get(default, Config.OPT_SOURCE)) default_cache = Fix...
Load the on-disk configuration file and merge it into config. Args: configuration: a config.Config object Raises: error.NoConfigFound: no configuration file was found
github-repos
def ParseFileObject(self, parser_mediator, file_object): file_header_map = self._GetDataTypeMap('asl_file_header') try: file_header, _ = self._ReadStructureFromFileObject( file_object, 0, file_header_map) except (ValueError, errors.ParseError) as exception: raise errors.UnableToP...
Parses an ASL file-like object. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. file_object (dfvfs.FileIO): file-like object. Raises: UnableToParseFile: when the file cannot be parsed.
juraj-google-style
def _on_channel_open(self, channel):
    """Callback used when a channel is opened.

    This registers all the channel callbacks.

    Args:
        channel (pika.channel.Channel): The channel that successfully opened.
    """
    channel.add_on_close_callback(self._on_channel_close)
    channel.add_on_cancel_callback(self._on_cancel)
    # QoS settings are read from the application-wide config mapping.
    channel.basic_qos(callback=self._on_qosok, **config.conf["qos"])
Callback used when a channel is opened. This registers all the channel callbacks. Args: channel (pika.channel.Channel): The channel that successfully opened.
juraj-google-style
def GetSubkeyByIndex(self, index):
    """Retrieves a subkey by index.

    Args:
        index (int): index of the subkey.

    Returns:
        WinRegistryKey: Windows Registry subkey.

    Raises:
        IndexError: if the index is out of bounds.
    """
    # Lazily resolve the key from the registry on first access.
    if not self._registry_key and self._registry:
        self._GetKeyFromRegistry()

    subkeys = list(self._subkeys.values())
    if 0 <= index < len(subkeys):
        return subkeys[index]
    raise IndexError('Index out of bounds.')
Retrieves a subkey by index. Args: index (int): index of the subkey. Returns: WinRegistryKey: Windows Registry subkey. Raises: IndexError: if the index is out of bounds.
juraj-google-style
def partition(self, id_): from ..orm import Partition as OrmPartition from sqlalchemy import or_ from ..identity import PartialPartitionName if isinstance(id_, PartitionIdentity): id_ = id_.id_ elif isinstance(id_, PartialPartitionName): id_ = id_.promote(self.bundle.identity.name) ...
Get a partition by the id number. Arguments: id_ -- a partition id value Returns: A partitions.Partition object Throws: a Sqlalchemy exception if the partition either does not exist or is not unique Because this method works on the bundle, the id_ ( without version information ) is equivalent to the vid ( with vers...
codesearchnet
def on_success(self, inv_plugin, emit_set_slot):
    """Called when the click was successful and should be applied to the inventory.

    Args:
        inv_plugin (InventoryPlugin): inventory plugin instance
        emit_set_slot (func): function to signal a slot change, should be
            InventoryPlugin().emit_set_slot
    """
    # apply() records every touched slot into self.dirty; reset it first.
    self.dirty = set()
    self.apply(inv_plugin)
    # Signal each slot that apply() marked as changed.
    for slot in self.dirty:
        emit_set_slot(slot)
Called when the click was successful and should be applied to the inventory. Args: inv_plugin (InventoryPlugin): inventory plugin instance emit_set_slot (func): function to signal a slot change, should be InventoryPlugin().emit_set_slot
codesearchnet
def housekeeping(self, **kwargs):
    """Start the housekeeping task.

    Args:
        **kwargs: Extra options to send to the server (e.g. sudo)

    Raises:
        GitlabAuthenticationError: If authentication is not correct
        GitlabHousekeepingError: If the server failed to perform the request
    """
    endpoint = '/projects/%s/housekeeping' % self.get_id()
    self.manager.gitlab.http_post(endpoint, **kwargs)
Start the housekeeping task. Args: **kwargs: Extra options to send to the server (e.g. sudo) Raises: GitlabAuthenticationError: If authentication is not correct GitlabHousekeepingError: If the server failed to perform the request
codesearchnet
def assignHolidayDate(self, holiday, month, day): holiday += 1 if (month > 12) or (month < 0) or (day > 31) or (day < 0) or (holiday < 1) or (holiday > Extents.Holidays): ekm_log("Out of bounds: month " + str(month) + " day " + str(day) + " holiday " + str(holiday)) retu...
Set a single holiday day and month in object buffer. There is no class style enum for holidays. Args: holiday (int): 0-19 or range(Extents.Holidays). month (int): Month 1-12. day (int): Day 1-31. Returns: bool: True on completion.
juraj-google-style
def delete(self, **options):
    """Permanently delete this blob from Blobstore.

    Args:
        **options: Options for create_rpc().
    """
    # Start the asynchronous delete, then block until it finishes.
    result_future = delete_async(self.key(), **options)
    result_future.get_result()
Permanently delete this blob from Blobstore. Args: **options: Options for create_rpc().
codesearchnet
def send_msg_to_webhook(self, message): payload = { 'content':message } header = { 'Content-Type':'application/json' } try: request = requests.post( self.api_url, headers=header, json=p...
Separated Requests logic for easier testing. Args: message (str): actual logging string to be passed to REST endpoint Todo: * Requests.text/json return for better testing options
juraj-google-style
def _decompose_and_get_unitary(val: Union[('cirq.Operation', 'cirq.Gate')]) -> np.ndarray: from cirq.protocols.apply_unitary import apply_unitary, ApplyUnitaryArgs from cirq.protocols.decompose import decompose_once, decompose_once_with_qubits from cirq import Gate, LineQubit, Operation if isinstance(va...
Try to decompose a cirq.Operation or cirq.Gate, and return its unitary if it exists. Returns: If `val` can be decomposed into unitaries, calculate the resulting unitary and return it. If it doesn't exist, None is returned.
codesearchnet
def CopyFromDateTimeString(self, time_string): date_time_values = self._CopyDateTimeFromString(time_string) year = date_time_values.get('year', 0) month = date_time_values.get('month', 0) day_of_month = date_time_values.get('day_of_month', 0) hours = date_time_values.get('hours', 0) minutes = da...
Copies a Delphi TDateTime timestamp from a string. Args: time_string (str): date and time value formatted as: YYYY-MM-DD hh:mm:ss.######[+-]##:## Where # are numeric digits ranging from 0 to 9 and the seconds fraction can be either 3 or 6 digits. The time of day, seconds fraction and time zone offset are optional. Th...
codesearchnet
def set_local_interface(self, value=None, default=False, disable=False):
    """Configures the mlag local-interface value.

    Args:
        value (str): The value to configure the local-interface.
        default (bool): Configures the local-interface using the default keyword.
        disable (bool): Negates the local-interface using the no keyword.

    Returns:
        bool: Returns True if the commands complete successfully.
    """
    # All mlag attribute writes funnel through the shared configure helper.
    attribute = 'local-interface'
    return self._configure_mlag(attribute, value, default, disable)
Configures the mlag local-interface value Args: value (str): The value to configure the local-interface default (bool): Configures the local-interface using the default keyword disable (bool): Negates the local-interface using the no keyword Returns: bool: Returns True if the commands complete successfully
juraj-google-style
def match_date(date):
    """Check if a string starts with a valid YYYY-MM-DD style date.

    Years 1900-2099 are accepted and '-', ' ', '/' or '.' may separate the
    fields. Day-of-month is validated only syntactically (01-31), not
    against the month.

    Args:
        date (str): The string to check.

    Returns:
        bool: True if the string begins with a valid date.
    """
    # Raw string: the old non-raw "\d" relied on Python keeping unknown
    # escapes, which now emits a SyntaxWarning. Behavior is unchanged.
    date_pattern = re.compile(
        r"^(19|20)\d\d[- /.](0[1-9]|1[012])[- /.](0[1-9]|[12][0-9]|3[01])")
    return bool(date_pattern.match(date))
Check if a string is a valid date Args: date(str) Returns: bool
juraj-google-style
def profile_operations(self, options): opts = _build_options(options) tfprof_node = tfprof_output_pb2.MultiGraphNodeProto() try: tfprof_node.ParseFromString(print_mdl.Profile('op'.encode('utf-8'), opts.SerializeToString())) except message.DecodeError as e: sys.stderr.write('Cannot parse ...
Profile the statistics of the Operation types (e.g. MatMul, Conv2D). Args: options: A dict of options. See core/profiler/g3doc/options.md. Returns: a MultiGraphNodeProto that records the results.
github-repos
def write(self, ostream, kmip_version=enums.KMIPVersion.KMIP_1_0): tstream = BytearrayStream() self.hashing_algorithm.write(tstream, kmip_version=kmip_version) self.digest_value.write(tstream, kmip_version=kmip_version) self.key_format_type.write(tstream, kmip_version=kmip_vers...
Write the data encoding the Digest object to a stream. Args: ostream (Stream): A data stream in which to encode object data, supporting a write method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be encoded. Optional, defaults to KMI...
juraj-google-style
def show(self, objtype, objid):
    """Query for a specific resource by ID.

    Args:
        objtype (str): object type, e.g. 'device', 'interface'
        objid (int): object ID (DeviceID, etc.)

    Returns:
        A dict with that object

    Raises:
        requests.exceptions.HTTPError
    """
    # objid may arrive as a string; normalize to int for the URL builder.
    target_url = self._object_url(objtype, int(objid))
    return self._make_request(target_url, method="get")
Query for a specific resource by ID Args: objtype (str): object type, e.g. 'device', 'interface' objid (int): object ID (DeviceID, etc.) Returns: A dict with that object Raises: requests.exceptions.HTTPError
juraj-google-style
def wait_for_stop(self, timeout=None):
    """Wait till the Coordinator is told to stop.

    Args:
        timeout: Float. Sleep for up to that many seconds waiting for
            should_stop() to become True.

    Returns:
        True if the Coordinator is told stop, False if the timeout expired.
    """
    # Delegates to threading.Event.wait; timeout=None blocks indefinitely.
    return self._stop_event.wait(timeout)
Wait till the Coordinator is told to stop. Args: timeout: Float. Sleep for up to that many seconds waiting for should_stop() to become True. Returns: True if the Coordinator is told stop, False if the timeout expired.
github-repos
def extract_certs(certs_txt: str) -> List[crypto.X509]:
    """Extracts pyOpenSSL X509 objects from SSL certificates chain string.

    Args:
        certs_txt: SSL certificates chain string.

    Returns:
        result: List of pyOpenSSL X509 objects.
    """
    # Non-greedy DOTALL match pulls out each PEM block, headers included.
    pem_block = '-----BEGIN CERTIFICATE-----.+?-----END CERTIFICATE-----'
    pem_texts = re.findall(pem_block, certs_txt, flags=re.DOTALL)
    return [
        crypto.load_certificate(crypto.FILETYPE_PEM, pem_text)
        for pem_text in pem_texts
    ]
Extracts pycrypto X509 objects from SSL certificates chain string. Args: certs_txt: SSL certificates chain string. Returns: result: List of pycrypto X509 objects.
codesearchnet
def __init__(self, file_handle, schema): if not file_handle.writable(): raise ValueError('Output stream must be writable') self._file_handle = file_handle avro_schema = fastavro.parse_schema(get_avro_schema_from_table_schema(schema)) self._avro_writer = fastavro.write.Writer(self._file_handle, a...
Initialize an AvroRowWriter. Args: file_handle (io.IOBase): Output stream to write Avro records to. schema (Dict[Text, Any]): BigQuery table schema.
github-repos
def _ParseFileEntry(self, knowledge_base, file_entry): if file_entry.link: _, _, time_zone = file_entry.link.partition('zoneinfo/') else: file_object = file_entry.GetFileObject() time_zone = None try: time_zone_file = tz.tzfile(file_object) date_time ...
Parses artifact file system data for a preprocessing attribute. Args: knowledge_base (KnowledgeBase): to fill with preprocessing information. file_entry (dfvfs.FileEntry): file entry that contains the artifact value data. Raises: errors.PreProcessFail: if the preprocessing fails.
juraj-google-style
def Merge(self, other): if type(self) != type(other): raise TypeError('Attempt to Merge() differently typed Maps: %r != %r' % (type(self), type(other))) if other.GetModifyTimestamp() and self.GetModifyTimestamp(): if other.GetModifyTimestamp() < self.GetModifyTimestamp(): raise error...
Update this Map based on another Map. Walk over other and for each entry, Add() it if it doesn't exist -- this will update changed entries as well as adding new ones. Args: other: A maps.Map instance. Returns: True if anything was added or modified, False if nothing changed. Raises: TypeError: Merging differently t...
github-repos
def ProcessNewBlock(self, block): added = set() changed = set() deleted = set() try: for tx in block.FullTransactions: for (index, output) in enumerate(tx.outputs): state = self.CheckAddressState(output.ScriptHash) if ((state & AddressState.InWallet) >...
Processes a block on the blockchain. This should be done in a sequential order, ie block 4 should be only processed after block 3. Args: block: (neo.Core.Block) a block on the blockchain.
codesearchnet
def _GetStat(self):
    """Retrieves a stat object.

    Returns:
        VFSStat: a stat object.

    Raises:
        BackEndError: when the encoded stream is missing.
    """
    stat = vfs_stat.VFSStat()
    # The logical size of this entry is the configured range size.
    stat.size = self.path_spec.range_size
    stat.type = stat.TYPE_FILE
    return stat
Retrieves a stat object. Returns: VFSStat: a stat object. Raises: BackEndError: when the encoded stream is missing.
codesearchnet
def from_celery(cls, worker_name, job_dict, celery_app): if ((not isinstance(job_dict, dict)) or ('id' not in job_dict)): raise JobStatInvalid('The job description is missing important fields.') async_result = AsyncResult(id=job_dict['id'], app=celery_app) a_info = (async_result.info if isinstance(a...
Create a JobStats object from the dictionary returned by celery. Args: worker_name (str): The name of the worker this jobs runs on. job_dict (dict): The dictionary as returned by celery. celery_app: Reference to a celery application object. Returns: JobStats: A fully initialized JobStats object.
codesearchnet
def get_metrics_namespace(self) -> str:
    """Returns:
       A namespace for metrics collected by the RunInference transform.
    """
    return 'BeamML_Onnx'
Returns: A namespace for metrics collected by the RunInference transform.
github-repos
def initialize_schema(connection): cursor = connection.cursor() cursor.execute("PRAGMA application_id={}".format(_TENSORBOARD_APPLICATION_ID)) cursor.execute("PRAGMA user_version={}".format(_TENSORBOARD_USER_VERSION)) with connection: for statement in _SCHEMA_STATEMENTS: lines = statement.strip('\n...
Initializes the TensorBoard sqlite schema using the given connection. Args: connection: A sqlite DB connection.
juraj-google-style
def create_datasets(): if use_device: datasets = [] for i in range(len(self.embedding_devices)): datasets.append(dataset_ops.DatasetV2.from_tensor_slices({'feature': [[[i % self._num_cores_per_replica]]]}).repeat()) return datasets else: dataset = strategy.distribute_...
Creates either a per-replica dataset, or multiple per-devices ones. This function explicitly creates per-device datasets because the strategy does not produce a distributed dataset in the model-parallel case; there is only one replica. Without this consideration, the embeddings would be read as [0, 0] instead of the e...
github-repos
def aggregate(self, dataset_ids=None, boundary='exact', side='left', func='mean', **dim_kwargs): new_scn = self.copy(datasets=dataset_ids) for (src_area, ds_ids) in new_scn.iter_by_area(): if (src_area is None): for ds_id in ds_ids: new_scn.datasets[ds_id] = self[ds_id] ...
Create an aggregated version of the Scene. Args: dataset_ids (iterable): DatasetIDs to include in the returned `Scene`. Defaults to all datasets. func (string): Function to apply on each aggregation window. One of 'mean', 'sum', 'min', 'max', 'median', 'argmin', 'argmax', 'prod', 'std', 'var'. 'mean' is the default. b...
codesearchnet
def BuildDefaultValue(self, value_cls):
    """Renders default value of a given class.

    Args:
        value_cls: Default value of this class will be rendered. This class
            has to be (or to be a subclass of) a self.value_class (i.e. a
            class that this renderer is capable of rendering).

    Returns:
        An initialized default value.

    Raises:
        DefaultValueError: if something goes wrong.
    """
    try:
        return value_cls()
    except Exception as e:
        # Log the full traceback before wrapping in the renderer error.
        logging.exception(e)
        message = ("Can't create default for value %s: %s"
                   % (value_cls.__name__, e))
        raise DefaultValueError(message)
Renders default value of a given class. Args: value_cls: Default value of this class will be rendered. This class has to be (or to be a subclass of) a self.value_class (i.e. a class that this renderer is capable of rendering). Returns: An initialized default value. Raises: DefaultValueError: if something goes wrong.
codesearchnet
def remove(self, key):
    """Remove the data stored for the given key.

    Args:
        key (str): Key of the data to remove.

    Note:
        The container has to be opened in advance.
    """
    self.raise_error_if_not_open()
    # Removing a key that is not present is a silent no-op.
    if (key in self._file):
        del self._file[key]
Remove the data stored for the given key. Args: key (str): Key of the data to remove. Note: The container has to be opened in advance.
codesearchnet
def load(config): if config.sys_path: logger.debug("Appending %s to sys.path.", config.sys_path) sys.path.append(config.sys_path) logger.debug("sys.path is now %s", sys.path) if config.lookups: for key, handler in config.lookups.items(): register_lookup_handler(...
Loads a stacker configuration by modifying sys paths, loading lookups, etc. Args: config (:class:`Config`): the stacker config to load. Returns: :class:`Config`: the stacker config provided above.
juraj-google-style
def parse_init_dat(infile): init_dict = {} log.debug('{}: reading file...'.format(infile)) with open(infile, 'r') as f: head = [next(f).strip() for x in range(2)] summary = head[0].split() difficulty = summary[1] top_template_info = head[1].split() top_template...
Parse the main init.dat file which contains the modeling results The first line of the file init.dat contains stuff like:: "120 easy 40 8" The other lines look like this:: " 161 11.051 1 1guqA MUSTER" and getting the first 10 gives you the top 10 templates used in modeling Args: infile (stt): Path to ...
juraj-google-style
def resolve_mode(self, name):
    """From given mode name, return mode file path from
    ``settings.CODEMIRROR_MODES`` map.

    Arguments:
        name (string): Mode name.

    Raises:
        UnknowModeError: When given name does not exist in
            ``settings.CODEMIRROR_MODES``.

    Returns:
        string: Mode file path.
    """
    # EAFP: a single lookup instead of a membership test plus .get().
    try:
        return settings.CODEMIRROR_MODES[name]
    except KeyError:
        msg = ("Given config name '{}' does not exists in "
               "'settings.CODEMIRROR_MODES'.")
        raise UnknowModeError(msg.format(name))
From given mode name, return mode file path from ``settings.CODEMIRROR_MODES`` map. Arguments: name (string): Mode name. Raises: KeyError: When given name does not exist in ``settings.CODEMIRROR_MODES``. Returns: string: Mode file path.
juraj-google-style
class PatchTSMixerLinearHead(nn.Module): def __init__(self, config: PatchTSMixerConfig, distribution_output=None): super().__init__() self.head_aggregation = config.head_aggregation self.output_range = config.output_range if config.head_aggregation is None: mul_factor = ...
Linear head for Classification and Regression. Args: config (`PatchTSMixerConfig`): Configuration.
github-repos
def field(*, validate: Optional[Callable[[_In], _OutT]]=None, **kwargs: Any) -> dataclasses.Field[_OutT]:
    """Like `dataclasses.field`, but allow `validator`.

    Args:
        validate: A callable `(x) -> x` called each time the variable is
            assigned.
        **kwargs: Kwargs forwarded to `dataclasses.field`

    Returns:
        The field.
    """
    # No validator: behave exactly like dataclasses.field().
    if validate is None:
        return dataclasses.field(**kwargs)
    # Otherwise wrap in the validating Field subclass.
    wrapped = _Field(validate=validate, field_kwargs=kwargs)
    return typing.cast(dataclasses.Field, wrapped)
Like `dataclasses.field`, but allow `validator`. Args: validate: A callable `(x) -> x` called each time the variable is assigned. **kwargs: Kwargs forwarded to `dataclasses.field` Returns: The field.
github-repos
def for_new_graph(*args, **kwargs):
    """Creates a Bookkeeper for a new graph.

    You must use `m.g.as_default()` to put the graph in scope:

        m = Bookkeeper.for_new_graph()
        with m.g.as_default():
          ...

    Args:
        *args: Arguments to pass into Bookkeeper's constructor.
        **kwargs: Arguments to pass into Bookkeeper's constructor.

    Returns:
        A new Bookkeeper.
    """
    fresh_graph = tf.Graph()
    # Build the Bookkeeper while the new graph is the default graph.
    with fresh_graph.as_default():
        return for_default_graph(*args, **kwargs)
Creates a Bookkeeper for a new graph. You must use `m.g.as_default()` to put the graph in scope: m = Bookkeeper.for_new_graph() with m.g.as_default(): ... Args: *args: Arguments to pass into Bookkeeper's constructor. **kwargs: Arguments to pass into Bookkeeper's constructor. Returns: A new Bookkeeper.
juraj-google-style
def progress(self):
    """Get progress.

    Returns:
        namedtuple: :class:`Progress` with the number of completed
        properties and the total number of workers.
    """
    completed = len(self._get_all_set_properties())
    total = len(worker_mapping())
    return Progress(done=completed, base=total)
Get progress. Returns: namedtuple: :class:`Progress`.
codesearchnet
def inspect_node(self, node_id):
    """Retrieve low-level information about a swarm node

    Args:
        node_id (string): ID of the node to be inspected.

    Returns:
        A dictionary containing data about this node.

    Raises:
        :py:class:`docker.errors.APIError`
            If the server returns an error.
    """
    node_url = self._url('/nodes/{0}', node_id)
    response = self._get(node_url)
    return self._result(response, True)
Retrieve low-level information about a swarm node Args: node_id (string): ID of the node to be inspected. Returns: A dictionary containing data about this node. Raises: :py:class:`docker.errors.APIError` If the server returns an error.
codesearchnet
def __init__(self, match=Match(), table_id=0xff, out_port=Port.OFPP_NONE):
    """Create a AggregateStatsRequest with the optional parameters below.

    Args:
        match (~pyof.v0x01.common.flow_match.Match): Fields to match.
            NOTE(review): the ``Match()`` default is evaluated once at class
            definition time and is therefore shared between calls that omit
            it — confirm Match instances are treated as immutable here.
        table_id (int): ID of table to read (from pyof_table_stats) 0xff for
            all tables or 0xfe for emergency.
        out_port (int): Require matching entries to include this as an
            output port. OFPP_NONE means no restriction.
    """
    super().__init__()
    self.match = match
    self.table_id = table_id
    self.out_port = out_port
Create a AggregateStatsRequest with the optional parameters below. Args: match (~pyof.v0x01.common.flow_match.Match): Fields to match. table_id (int): ID of table to read (from pyof_table_stats) 0xff for all tables or 0xfe for emergency. out_port (int): Require matching entries to include this as an output port. A val...
juraj-google-style
def expected_counts(dataframe, rownames, colnames): cont_table = contingency_table(dataframe, rownames=rownames, colnames=colnames, margins=True) row_counts = cont_table['All'] column_counts = cont_table.loc['All'] total_observations = cont_table['All']['All'] for column in cont_table.col...
Expected counts of the multivariate frequency distribution of the variables given the null hypothesis of complete independence between variables. Args: rownames: the column name or list of columns names that make the keys of the rows colnames: the column name or list of columns names that make the keys of the columns
juraj-google-style
class PerceiverAudioPostprocessor(nn.Module): def __init__(self, config: PerceiverConfig, in_channels: int, postproc_type: str='patches') -> None: super().__init__() if postproc_type not in ('patches',): raise ValueError('Invalid postproc_type!') self.classifier = nn.Linear(in_c...
Audio postprocessing for Perceiver. Can be used to convert the decoder output to audio features. Args: config ([*PerceiverConfig*]): Model configuration. in_channels (`int`): Number of channels in the input. postproc_type (`str`, *optional*, defaults to `"patches"`): Postprocessor type to use. Currently, only "patches...
github-repos
def dataset(self, mode, hparams=None, global_step=None, **kwargs): datasets = [p.dataset(mode, **kwargs) for p in self.problems] datasets = [d.map((lambda x, i=j: self.normalize_example(dict(x, problem_id=tf.constant([i])), hparams))) for (j, d) in enumerate(datasets)] if (mode is problem.DatasetSplit.TRAIN...
Returns a dataset containing examples from multiple problems. Args: mode: A member of problem.DatasetSplit. hparams: A tf.HParams object, the model hparams. global_step: A scalar tensor used to compute the sampling distribution. If global_step is None, we call tf.train.get_or_create_global_step by default. **kwargs: K...
codesearchnet
def set_number_of_partitions(self, number_of_partitions): if self._frozen: if self._number_of_partitions != number_of_partitions: raise ValueError(f"Can't set number_of_partitions to {number_of_partitions} since it has been frozen to use {self._number_of_partitions}.") else: self._nu...
Sets the number of partitions for the current policy. If the policy has been frozen then shard_dimension must match the existing setting. Args: number_of_partitions: The number of partitions to use in the policy. Raises: ValueError: If the policy has been frozen and shard_dimension differs from the frozen value.
github-repos
def to_script(self, wf_name='wf'): self._closed() script = [] params = [] returns = [] for (name, typ) in self.wf_inputs.items(): params.append("{}='{}'".format(name, typ)) returns.append(name) script.append('{} = {}.add_inputs({})'.format(', '.join(returns), wf_name, ', '.join(p...
Generated and print the scriptcwl script for the currunt workflow. Args: wf_name (str): string used for the WorkflowGenerator object in the generated script (default: ``wf``).
codesearchnet
def safe_indicator(self, indicator, errors='strict'):
    """Indicator encode value for safe HTTP request.

    Args:
        indicator (string): Indicator to URL Encode.
        errors (string): The error handler type.

    Returns:
        (string): The urlencoded string.
    """
    # None passes through untouched.
    if indicator is None:
        return indicator
    try:
        return quote(self.s(str(indicator), errors=errors), safe='~')
    except KeyError:
        # Fall back to raw byte conversion when encoding lookup fails.
        return quote(bytes(indicator), safe='~')
Indicator encode value for safe HTTP request. Args: indicator (string): Indicator to URL Encode errors (string): The error handler type. Returns: (string): The urlencoded string
codesearchnet
def forward(self, hidden_states: torch.Tensor, attention_mask: torch.Tensor, output_attentions: Optional[bool]=False) -> Tuple[torch.FloatTensor]: residual = hidden_states hidden_states = self.layer_norm1(hidden_states) hidden_states, attn_weights = self.self_attn(hidden_states=hidden_states, attention_mask...
Args: hidden_states (`torch.FloatTensor`): Input to the layer of shape `(batch, seq_len, embed_dim)`. attention_mask (`torch.FloatTensor`): Attention mask of shape `(batch, 1, q_len, k_v_seq_len)` where padding elements are indicated by very large negative values. output_attentions (`bool`, *optional*, defaults to `Fal...
github-repos
def __solve_for_scalar(expr, vars): var = solve(expr, vars).value try: scalar = repeated.getvalue(var) except TypeError: raise errors.EfilterTypeError( root=expr, query=expr.source, message="Wasn't expecting more than one value here. Got %r." % (var,)...
Helper: solve 'expr' always returning a scalar (not IRepeated). If the output of 'expr' is a single value or a single RowTuple with a single column then return the value in that column. Otherwise raise. Arguments: expr: Expression to solve. vars: The scope. Returns: A scalar value (not an IRepeated). Raises: Efilte...
juraj-google-style
def safe_rt(resource_type, lower=False):
    """Format the Resource Type.

    Takes Custom Indicator types with a space character and return a *safe*
    string. (e.g. *User Agent* is converted to User_Agent or user_agent.)

    Args:
        resource_type (string): The resource type to format.
        lower (boolean): Return type in all lower case

    Returns:
        (string): The formatted resource type, or None when None was given.
    """
    if resource_type is None:
        return None
    formatted = resource_type.replace(' ', '_')
    return formatted.lower() if lower else formatted
Format the Resource Type. Takes Custom Indicator types with a space character and return a *safe* string. (e.g. *User Agent* is converted to User_Agent or user_agent.) Args: resource_type (string): The resource type to format. lower (boolean): Return type in all lower case Returns: (string): The formatted resource ...
juraj-google-style
def collection(self, collection_id):
    """Create a sub-collection underneath the current document.

    Args:
        collection_id (str): The sub-collection identifier (sometimes
            referred to as the "kind").

    Returns:
        ~.firestore_v1beta1.collection.CollectionReference: The child
        collection.
    """
    # The child path is this document's path plus the new segment.
    return self._client.collection(*self._path, collection_id)
Create a sub-collection underneath the current document. Args: collection_id (str): The sub-collection identifier (sometimes referred to as the "kind"). Returns: ~.firestore_v1beta1.collection.CollectionReference: The child collection.
juraj-google-style
def write(self, file_name):
    """Writes the chapter object to an xhtml file.

    Args:
        file_name (str): The full name of the xhtml file to save to.

    Raises:
        ValueError: If file_name does not end with '.xhtml'.
    """
    # Explicit check instead of the old `assert`: asserts are stripped
    # under `python -O`, which silently skipped the validation. The old
    # IndexError handler was dead code — slicing never raises it.
    if not file_name.endswith('.xhtml'):
        raise ValueError('filename must end with .xhtml')
    with open(file_name, 'wb') as f:
        f.write(self.content.encode('utf-8'))
Writes the chapter object to an xhtml file. Args: file_name (str): The full name of the xhtml file to save to.
codesearchnet
def create_autoscale_setting(access_token, subscription_id, resource_group, setting_name, vmss_name, location, minval, maxval, default, autoscale_rules, notify=None): endpoint = ''.join([get_rm_endpoint(), '/subscriptions/', subscription_id, '/resourceGroups/', resource_group, '/providers/microsoft.insights/autosca...
Create a new autoscale setting for a scale set. Args: access_token (str): A valid Azure authentication token. subscription_id (str): Azure subscription id. resource_group (str): Azure resource group name. setting_name (str): Name of the autoscale setting. vmss_name (str): Name of scale set to apply scale events to. lo...
codesearchnet
def isPortAvailable(port='/dev/ttyUSB0'):
    """Checks whether specified port is available.

    Source code derived from @lqdev suggestion per #38

    Args:
        port: Serial port location i.e. 'COM1'. Default is /dev/ttyUSB0.

    Returns:
        available: Boolean value indicating presence of port.
    """
    # grep() returns a generator; the port exists iff it yields anything.
    # The old code stored the generator in a local variable that shadowed
    # this function's own name and spelled the emptiness check with a
    # try/except StopIteration; next() with a default is equivalent.
    matches = serial.tools.list_ports.grep(port)
    return next(matches, None) is not None
Checks whether specified port is available. Source code derived from @lqdev suggestion per #38 Args: port: Serial port location i.e. 'COM1'. Default is /dev/ttyUSB0 Returns: available: Boolean value indicating presence of port
juraj-google-style
def export(bundle, force=False, force_restricted=False): if (not ckan): raise EnvironmentError(MISSING_CREDENTIALS_MSG) try: ckan.action.package_create(**_convert_bundle(bundle)) except ckanapi.ValidationError: if force: logger.warning('{} dataset already exported, but ne...
Exports bundle to ckan instance. Args: bundle (ambry.bundle.Bundle): force (bool, optional): if True, ignore existance error and continue to export. force_restricted (bool, optional): if True, then export restricted bundles as private (for debugging purposes). Raises: EnvironmentError: if ckan credentials are missing...
codesearchnet
def _batch_norm_op(self, input_batch, mean, variance, use_batch_stats, stat_dtype): if self._fused: (batch_norm_op, mean, variance) = self._fused_batch_norm_op(input_batch, self._moving_mean, self._moving_variance, use_batch_stats) else: batch_norm_op = tf.nn.batch_normalization(input_batch, mea...
Creates a batch normalization op. It uses the tf.nn.batch_normalization op by default and the tf.nn.fused_batch_norm op to support fused batch normalization. Args: input_batch: A input Tensor of arbitrary dimension. mean: A mean tensor, of the same dtype as `input_batch`. variance: A variance tensor, of the same dtyp...
codesearchnet
def output(self, stream, value):
    """SPL output port assignment expression.

    Arguments:
        stream(Stream): Output stream the assignment is for.
        value(str): SPL expression used for an output assignment. This can
            be a string, a constant, or an :py:class:`Expression`.

    Returns:
        Expression: Output assignment expression bound to `stream`.

    Raises:
        ValueError: If `stream` is not an output of this operator.
    """
    if stream not in self.outputs:
        raise ValueError("Stream is not an output of this operator.")
    assignment = self.expression(value)
    # Tag the expression with its destination stream.
    assignment._stream = stream
    return assignment
SPL output port assignment expression. Arguments: stream(Stream): Output stream the assignment is for. value(str): SPL expression used for an output assignment. This can be a string, a constant, or an :py:class:`Expression`. Returns: Expression: Output assignment expression that is valid as a the context of this oper...
juraj-google-style
def sentences(self, index = None): if index is None: return self.select(Sentence,None,True,default_ignore_structure) else: if index < 0: index = self.count(Sentence,None,True,default_ignore_structure) + index for i,e in enumerate(self.select(S...
Returns a generator of Sentence elements found (recursively) under this element Arguments: index (int or None): If set to an integer, will retrieve and return the n'th element (starting at 0) instead of returning a generator of all
juraj-google-style
def start(self, input_data, output_data, transform_resources, **kwargs): self.transform_resources = transform_resources self.input_data = input_data self.output_data = output_data image = self.primary_container['Image'] instance_type = transform_resources['InstanceType'...
Start the Local Transform Job Args: input_data (dict): Describes the dataset to be transformed and the location where it is stored. output_data (dict): Identifies the location where to save the results from the transform job transform_resources (dict): compute instances for the transform job. Currently only supports l...
juraj-google-style
def add_state(self, state_name, initial_state, batch_size=None): state_shape = initial_state.get_shape().as_list() full_shape = [batch_size] + state_shape if not batch_size: shape_proto = self._as_shape_proto([0] + state_shape) batch_size = 1 else: ...
Adds a state to the state saver. Args: state_name: The name of this state. initial_state: The initial state vector. Only zeros are supported. batch_size: The batch_size or None for unknown.
juraj-google-style
def from_csv(cls, filename=None, text=None): if ((filename is None) and (text is None)): raise LegendError('You must provide a filename or CSV text.') if (filename is not None): with open(filename, 'r') as f: text = f.read() try: f = StringIO(text) except TypeError: ...
Read CSV text and generate a Legend. Args: string (str): The CSV string. In the first row, list the properties. Precede the properties of the component with 'comp ' or 'component '. For example: colour, width, comp lithology, comp colour #FFFFFF, 0, , #F7E9A6, 3, Sandstone, Grey #FF99CC, 2, Anhydrite, ... etc Note...
codesearchnet
def list_metadata(self, resource):
    """List all metadata keys associated with the given resource.

    Args:
        resource (intern.resource.boss.BossResource): resource whose
            metadata keys are listed.

    Returns:
        (list)

    Raises:
        requests.HTTPError on a failure.
    """
    service = self.metadata_service
    service.set_auth(self._token_metadata)
    return service.list(resource)
List all keys associated with the given resource. Args: resource (intern.resource.boss.BossResource) Returns: (list) Raises: requests.HTTPError on a failure.
codesearchnet
def read_molden(inputfile, start_index=0, get_bonds=True): from chemcoord.cartesian_coordinates.cartesian_class_main import Cartesian with open(inputfile, 'r') as f: found = False while (not found): line = f.readline() if ('[N_GEO]' in line): found = True ...
Read a molden file. Args: inputfile (str): start_index (int): Returns: list: A list containing :class:`~chemcoord.Cartesian` is returned.
codesearchnet
def do_patch(endpoint, body, access_token):
    """Do an HTTP PATCH request and return JSON.

    Args:
        endpoint (str): Azure Resource Manager management endpoint.
        body (str): JSON body of information to patch.
        access_token (str): A valid Azure authentication token.

    Returns:
        HTTP response. JSON body.
    """
    # Build the full header set in one literal instead of mutating afterwards.
    headers = {
        "content-type": "application/json",
        "Authorization": 'Bearer ' + access_token,
        'User-Agent': get_user_agent(),
    }
    return requests.patch(endpoint, data=body, headers=headers)
Do an HTTP PATCH request and return JSON. Args: endpoint (str): Azure Resource Manager management endpoint. body (str): JSON body of information to patch. access_token (str): A valid Azure authentication token. Returns: HTTP response. JSON body.
juraj-google-style
def _new_open_bin(self, remaining_rect): factories_to_delete = set() new_bin = None for (key, binfac) in self._empty_bins.items(): a_rectangle_fits = False for (_, rect) in remaining_rect.items(): if binfac.fits_inside(rect[0], rect[1]): a_rectangle_fits = True ...
Extract the next bin where at least one of the rectangles in rem Arguments: remaining_rect (dict): rectangles not placed yet Returns: PackingAlgorithm: Initialized empty packing bin. None: No bin big enough for the rectangle was found
codesearchnet
def exchange(self, pubkey):
    """Perform an ECDH key exchange with a public key.

    Args:
        pubkey (PubKey): A PubKey to perform the ECDH with.

    Returns:
        bytes: The ECDH bytes. This is deterministic for a given
        pubkey and private key.

    Raises:
        s_exc.BadEccExchange: if the underlying exchange rejects the key.
    """
    try:
        shared = self.priv.exchange(c_ec.ECDH(), pubkey.publ)
    except ValueError as exc:
        raise s_exc.BadEccExchange(mesg=str(exc))
    return shared
Perform an ECDH key exchange with a public key. Args: pubkey (PubKey): A PubKey to perform the ECDH with. Returns: bytes: The ECDH bytes. This is deterministic for a given pubkey and private key.
juraj-google-style
def upload_from_url(cls, url, store=None, filename=None): if (store is None): store = 'auto' elif store: store = '1' else: store = '0' data = {'source_url': url, 'store': store} if filename: data['filename'] = filename result = uploading_request('POST', 'from_url/...
Uploads file from given url and returns ``FileFromUrl`` instance. Args: - url (str): URL of file to upload to - store (Optional[bool]): Should the file be automatically stored upon upload. Defaults to None. - False - do not store file - True - store file (can result in error if autostore is disabled for project) - Non...
codesearchnet
def _get_resource_view(self, resource_view): if isinstance(resource_view, dict): resource_view = ResourceView(resource_view, configuration=self.configuration) if isinstance(resource_view, ResourceView): return resource_view raise HDXError('Type %s is not...
Get resource view id Args: resource_view (Union[ResourceView,Dict]): ResourceView metadata from a ResourceView object or dictionary Returns: ResourceView: ResourceView object
juraj-google-style
def to_bytesize(value, default_unit=None, base=DEFAULT_BASE): if isinstance(value, (int, float)): return unitized(value, default_unit, base) if (value is None): return None try: if (value[(- 1)].lower() == 'b'): value = value[:(- 1)] unit = value[(- 1):].lower() ...
Convert `value` to bytes, accepts notations such as "4k" to mean 4096 bytes Args: value (str | unicode | int | None): Number of bytes optionally suffixed by a char from UNITS default_unit (str | unicode | None): Default unit to use for unqualified values base (int): Base to use (usually 1024) Returns: (int | None): D...
codesearchnet
def read_trailer(self):
    """Read the HTTP trailer fields.

    Reads lines until a blank line (the trailer terminator) is seen;
    the terminator line is included in the returned bytes.

    Returns:
        bytes: The trailer data.

    Coroutine.
    """
    _logger.debug('Reading chunked trailer.')
    chunks = []
    while True:
        line = (yield from self._connection.readline())
        chunks.append(line)
        if not line.strip():
            break
    return b''.join(chunks)
Read the HTTP trailer fields. Returns: bytes: The trailer data. Coroutine.
codesearchnet
def show(self, *args, **kwargs):
    """Shows the pourbaix plot.

    Args:
        *args: args forwarded to get_pourbaix_plot.
        **kwargs: kwargs forwarded to get_pourbaix_plot.

    Returns:
        None
    """
    self.get_pourbaix_plot(*args, **kwargs).show()
Shows the pourbaix plot Args: *args: args to get_pourbaix_plot **kwargs: kwargs to get_pourbaix_plot Returns: None
codesearchnet
def random_square_mask(shape, fraction): mask = np.ones(shape) patch_area = ((shape[0] * shape[1]) * fraction) patch_dim = np.int(math.floor(math.sqrt(patch_area))) if ((patch_area == 0) or (patch_dim == 0)): return mask x = np.random.randint((shape[0] - patch_dim)) y = np.random.randint...
Create a numpy array with specified shape and masked fraction. Args: shape: tuple, shape of the mask to create. fraction: float, fraction of the mask area to populate with `mask_scalar`. Returns: numpy.array: A numpy array storing the mask.
codesearchnet
def non_trainable_variables(self):
    """Sequence of non-trainable variables owned by this module and its submodules.

    Note: this method uses reflection to find variables on the current
    instance and submodules. For performance reasons you may wish to cache
    the result of calling this method if you don't expect the return value
    to change.

    Returns:
        A sequence of non-trainable variables.
    """
    flat = self._flatten(
        predicate=_is_non_trainable_variable, expand_composites=True)
    return tuple(flat)
Sequence of non-trainable variables owned by this module and its submodules. Note: this method uses reflection to find variables on the current instance and submodules. For performance reasons you may wish to cache the result of calling this method if you don't expect the return value to change. Returns: A sequence o...
github-repos
def Cancel(self, request, global_params=None):
    """Requests that a job be cancelled.

    The call returns immediately; the client must poll the job status to
    see whether the cancel completed. Cancelled jobs may still incur costs.

    Args:
        request: (BigqueryJobsCancelRequest) input message.
        global_params: (StandardQueryParameters, default: None) global
            arguments.

    Returns:
        The result of running the 'Cancel' method.
    """
    return self._RunMethod(
        self.GetMethodConfig('Cancel'), request, global_params=global_params)
Requests that a job be cancelled. This call will return immediately, and the client will need to poll for the job status to see if the cancel completed successfully. Cancelled jobs may still incur costs. Args: request: (BigqueryJobsCancelRequest) input message global_params: (StandardQueryParameters, default: None) gl...
github-repos
def download_and_extract(path, url, input_filename, target_filename): logging.info(('Downloading and extracting data to: %s' % path)) input_file = find_file(path, input_filename) target_file = find_file(path, target_filename) if (input_file and target_file): logging.info(('Already downloaded and...
Extract files from downloaded compressed archive file. Args: path: string directory where the files will be downloaded url: url containing the compressed input and target files input_filename: name of file containing data in source language target_filename: name of file containing data in target language Returns: Ful...
codesearchnet
def expand_tile(units, axis):
    """Expand and tile a tensor along the given axis.

    Args:
        units: tf tensor with dimensions [batch_size, time_steps,
            n_input_features].
        axis: axis along which to expand and tile. Must be 1 or 2.

    Returns:
        The input expanded at `axis` and tiled `time_steps` times along it.
    """
    assert axis in (1, 2)
    n_time_steps = tf.shape(units)[1]
    # Output of expand_dims is rank 4; tile only along the chosen axis.
    reps = [1] * 4
    reps[axis] = n_time_steps
    return tf.tile(tf.expand_dims(units, axis), reps)
Expand and tile a tensor along the given axis Args: units: tf tensor with dimensions [batch_size, time_steps, n_input_features] axis: axis along which to expand and tile. Must be 1 or 2
juraj-google-style
def remove_location(self, location): res = self._remove_hdxobject(self.data.get('groups'), location, matchon='name') if (not res): res = self._remove_hdxobject(self.data.get('groups'), location.upper(), matchon='name') if (not res): res = self._remove_hdxobject(self.data.get('groups'), locat...
Remove a location. If the location is already added, it is ignored. Args: location (str): Location to remove Returns: bool: True if location removed or False if not
codesearchnet
def _add_dns_records(self, conf, mgmts): nets = conf['nets'] dns_mgmt = mgmts[-1] LOGGER.debug('Using network %s as main DNS server', dns_mgmt) forward = conf['nets'][dns_mgmt].get('gw') dns_records = {} for net_name, net_spec in nets.iteritems(): dn...
Add DNS records dict('dns_records') to ``conf`` for each management network. Add DNS forwarder IP('dns_forward') for each none management network. Args: conf(spec): spec mgmts(list): management networks names Returns: None
juraj-google-style
def get_answers_for_student(student_item):
    """Retrieve answers from the backend for a student and question.

    Args:
        student_item (dict): The location of the problem this submission is
            associated with, as defined by a course, student, and item.

    Returns:
        Answers: answers for the student; empty when there are no
        submissions.
    """
    submissions = sub_api.get_submissions(student_item)
    if not submissions:
        return Answers()
    # Submissions are ordered newest-first; only the latest one counts.
    answer_item = submissions[0].get('answer', {})
    return Answers(answer_item.get(ANSWER_LIST_KEY, []))
Retrieve answers from backend for a student and question Args: student_item (dict): The location of the problem this submission is associated with, as defined by a course, student, and item. Returns: Answers: answers for the student
codesearchnet
def _validate_recurse_directive_types(current_schema_type, field_schema_type, context): type_hints = context['type_equivalence_hints'].get(field_schema_type) type_hints_inverse = context['type_equivalence_hints_inverse'].get(field_schema_type) allowed_current_types = {field_schema_type} if (type_hints a...
Perform type checks on the enclosing type and the recursed type for a recurse directive. Args: current_schema_type: GraphQLType, the schema type at the current location field_schema_type: GraphQLType, the schema type at the inner scope context: dict, various per-compilation data (e.g. declared tags, whether the curren...
codesearchnet
def output_shapes(self):
    """Returns the shape of each component of an element of this iterator.

    Returns:
        A (nested) structure of `tf.TensorShape` objects corresponding to
        each component of an element of this dataset.
    """
    def to_legacy_shape(component_spec):
        return component_spec._to_legacy_output_shapes()
    return nest.map_structure(to_legacy_shape, self._element_spec)
Returns the shape of each component of an element of this iterator. Returns: A (nested) structure of `tf.TensorShape` objects corresponding to each component of an element of this dataset.
github-repos
def subcomponents(self, subcomponents): for arg in self.args: if (arg.__class__.__name__ == 'Function'): subcomponents.append(arg.to_string()) if (arg.function_type == 'primary'): arg.subcomponents(subcomponents) else: subcomponents.append(arg.to_s...
Generate subcomponents of the BEL subject or object These subcomponents are used for matching parts of a BEL subject or Object in the Edgestore. Args: AST subcomponents: Pass an empty list to start a new subcomponents request Returns: List[str]: subcomponents of BEL subject or object
codesearchnet
def logistic(x: Union[float, np.ndarray], k: float, theta: float) -> Union[float, np.ndarray, None]:
    r"""
    Standard logistic function.

    .. math:: y = \frac {1} {1 + e^{-k (x - \theta)}}

    Args:
        x: :math:`x`; a scalar or a NumPy array (applied elementwise)
        k: :math:`k`, the steepness
        theta: :math:`\theta`, the midpoint

    Returns:
        :math:`y`, with the same shape as ``x`` (np.exp broadcasts), or
        ``None`` if any argument is ``None``.
    """
    # Fixed return annotation: the original claimed Optional[float], but an
    # ndarray input yields an ndarray result.
    if x is None or k is None or theta is None:
        return None
    return 1 / (1 + np.exp(-k * (x - theta)))
r""" Standard logistic function. .. math:: y = \frac {1} {1 + e^{-k (x - \theta)}} Args: x: :math:`x` k: :math:`k` theta: :math:`\theta` Returns: :math:`y`
juraj-google-style
def create(self, document_data):
    """Create the current document in the Firestore database.

    Args:
        document_data (dict): Property names and values to use for
            creating a document.

    Returns:
        google.cloud.firestore_v1beta1.types.WriteResult: The write result
        corresponding to the committed document; contains an
        ``update_time`` field.
    """
    batch = self._client.batch()
    batch.create(self, document_data)
    return _first_write_result(batch.commit())
Create the current document in the Firestore database. Args: document_data (dict): Property names and values to use for creating a document. Returns: google.cloud.firestore_v1beta1.types.WriteResult: The write result corresponding to the committed document. A write result contains an ``update_time`` field. Raises: ~...
codesearchnet
def getall(self): matches = ROUTES_RE.findall(self.config) routes = dict() for match in matches: ip_dest = match[0] next_hop = match[1] next_hop_ip = (None if (match[2] is '') else match[2]) distance = int(match[3]) data = {} data['tag'] = (None if (match[4] i...
Return all ip routes configured on the switch as a resource dict Returns: dict: An dict object of static route entries in the form:: { ip_dest: { next_hop: { next_hop_ip: { distance: { 'tag': tag, 'route_name': route_name } } } } } If the ip address specified does not have any associated static routes, then None is ...
codesearchnet
def transformer_latent_decoder(x, encoder_output, ed_attention_bias, hparams, name=None): with tf.variable_scope(name, default_name='transformer_latent_dec'): batch_size = common_layers.shape_list(x)[0] compressed_img_len = (hparams.img_len x = tf.reshape(x, [batch_size, compressed_img_len,...
Transformer decoder over latents using latent_attention_type. Args: x: Tensor of shape [batch, length_q, hparams.hidden_size]. length_q is the latent length, which is height * width * hparams.num_latents / (2**hparams.num_compress_steps). encoder_output: Tensor of shape [batch, length_kv, hparams.hidden_size]. ed_atte...
codesearchnet
def _get_row_fetcher(self, start_row=0, max_rows=None, page_size=_DEFAULT_PAGE_SIZE): if (not start_row): start_row = 0 elif (start_row < 0): if (self.length >= 0): start_row += self.length else: raise Exception('Cannot use negative indices for table of unknown le...
Get a function that can retrieve a page of rows. The function returned is a closure so that it can have a signature suitable for use by Iterator. Args: start_row: the row to start fetching from; default 0. max_rows: the maximum number of rows to fetch (across all calls, not per-call). Default is None which means no l...
codesearchnet
def add_key_value(self, key, value): key = self._metadata_map.get(key, key) if key in ['dateAdded', 'lastModified']: self._data[key] = self._utils.format_datetime(value, date_format='%Y-%m-%dT%H:%M:%SZ') elif key == 'confidence': self._data[key] = int(value) ...
Converts the value and adds it as a data field. Args: key: value:
juraj-google-style
def has_attribute(self, attribute: str) -> bool:
    """Return whether the node has an attribute with the given name.

    Use only if is_mapping() returns True.

    Args:
        attribute: The name of the attribute to check for.

    Returns:
        True iff the attribute is present.
    """
    # Generator expression instead of a list comprehension: avoids building
    # an intermediate list and lets any() short-circuit on the first match.
    return any(key_node.value == attribute
               for key_node, _ in self.yaml_node.value)
Whether the node has an attribute with the given name. Use only if is_mapping() returns True. Args: attribute: The name of the attribute to check for. Returns: True iff the attribute is present.
juraj-google-style
def _gal2idx(self, gal): l = coordinates.Longitude(gal.l, wrap_angle=(180.0 * units.deg)) j = (self._inv_pix_scale * (l.deg - self._l_bounds[0])).astype('i4') k = (self._inv_pix_scale * (gal.b.deg - self._b_bounds[0])).astype('i4') idx = ((((j < 0) | (j >= self._shape[0])) | (k < 0)) | (k >= self._shape...
Converts from Galactic coordinates to pixel indices. Args: gal (:obj:`astropy.coordinates.SkyCoord`): Galactic coordinates. Must store an array of coordinates (i.e., not be scalar). Returns: ``j, k, mask`` - Pixel indices of the coordinates, as well as a mask of in-bounds coordinates. Outputs have the same shape as t...
codesearchnet
def deleted(self, main_type, sub_type, deleted_since, owner=None, filters=None, params=None): params = params or {} if filters and filters.filters: params['filters'] = filters.filters_string if owner: params['owner'] = owner if deleted_since: ...
Args: owner: filters: main_type: sub_type: deleted_since: params: Return:
juraj-google-style
def raw_filter(self, filters):
    """Send all filters to the API verbatim; a thin wrapper only.

    Any advanced functionality shall be implemented as another method.

    Args:
        filters: List of filters (strings).

    Returns:
        :py:class:`SearchResult`
    """
    response = self._api.get(self._href, **{'filter[]': filters})
    return SearchResult(self, response)
Sends all filters to the API. Nothing fancy, just a wrapper. Any advanced functionality shall be implemented as another method. Args: filters: List of filters (strings) Returns: :py:class:`SearchResult`
codesearchnet