| code (string, length 20–4.93k) | docstring (string, length 33–1.27k) | source (3 classes) |
|---|---|---|
def run_metadata(name, data, step=None):
summary_metadata = summary_pb2.SummaryMetadata()
summary_metadata.plugin_data.plugin_name = 'graph_run_metadata'
summary_metadata.plugin_data.content = b'1'
with summary_scope(name, 'graph_run_metadata_summary', [data, step]) as (tag, _):
with ops.device(... | Writes entire RunMetadata summary.
A RunMetadata can contain DeviceStats, partition graphs, and function graphs.
Please refer to the proto for definition of each field.
Args:
name: A name for this summary. The summary tag used for TensorBoard will be
this name prefixed by any active name scopes.
data: A RunMetadata p... | github-repos |
def publish(self, topic, dct):
    """Send a dict to the exchange under the given routing key.

    Args:
        topic: Routing key to publish the message to.
        dct: Dict object to send; serialized to JSON before publishing.
    """
    # Lazy %-style args avoid formatting the message when INFO is disabled.
    get_logger().info('Publishing message %s on routing key %s...', dct, topic)
    self._channel.basic_publish(exchange=self.exchange,
                                routing_key=topic,
                                body=json.dumps(dct))
def from_dict(cls, d, fmt=None):
if fmt == "abivars":
from pymatgen.io.abinit.abiobjects import structure_from_abivars
return structure_from_abivars(cls=cls, **d)
lattice = Lattice.from_dict(d["lattice"])
sites = [PeriodicSite.from_dict(sd, lattice) for sd in d[... | Reconstitute a Structure object from a dict representation of Structure
created using as_dict().
Args:
d (dict): Dict representation of structure.
Returns:
Structure object | juraj-google-style |
def yaml_to_ordered_dict(stream, loader=yaml.SafeLoader):
class OrderedUniqueLoader(loader):
NO_DUPE_SIBLINGS = ["stacks", "class_path"]
NO_DUPE_CHILDREN = ["stacks"]
def _error_mapping_on_dupe(self, node, node_name):
if isinstance(n... | Provides yaml.load alternative with preserved dictionary order.
Args:
stream (string): YAML string to load.
loader (:class:`yaml.loader`): PyYAML loader class. Defaults to safe
load.
Returns:
OrderedDict: Parsed YAML. | juraj-google-style |
def user_exists(self, username):
    """Determine whether a user exists in the Crowd application.

    Args:
        username: The user name to look up.

    Returns:
        True if the user exists; None when the lookup request failed
        (note: the failure value is None, not False).
    """
    response = self._get(self.rest_url + "/user",
                         params={"username": username})
    if response.ok:
        return True
    return None
def ParseFileObject(self, parser_mediator, file_object):
if file_object.read(1) != b'{':
raise errors.UnableToParseFile((
'[{0:s}] {1:s} is not a valid Preference file, '
'missing opening brace.').format(
self.NAME, parser_mediator.GetDisplayName()))
file_objec... | Parses a Chrome preferences file-like object.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
file_object (dfvfs.FileIO): file-like object.
Raises:
UnableToParseFile: when the file cannot be parsed. | juraj-google-style |
def __init__(self, scope, parent, explicit=True):
    """Create a code block.

    Args:
        scope (CodeEntity): The program scope this block belongs to.
        parent (CodeEntity): This object's parent in the program tree.

    Kwargs:
        explicit (bool): Whether the block is explicit in the code.
    """
    CodeStatement.__init__(self, scope, parent)
    self.explicit = explicit
    # Statements contained in this block, in source order.
    self.body = []
def clone(self, name=None):
if name is None:
name = self.module_name + "_clone"
return type(self)(output_channels=self.output_channels,
kernel_shape=self._kernel_shape,
stride=self._stride,
rate=self._rate,
paddi... | Returns a cloned `_ConvND` module.
Args:
name: Optional string assigning name of cloned module. The default name
is constructed by appending "_clone" to `self.module_name`.
Returns:
A copy of the current class. | juraj-google-style |
def tokenize(self, s, pattern=None, active=None):
if pattern is None:
if self.tokenize_pattern is None:
pattern = r'[ \t]+'
else:
pattern = self.tokenize_pattern
if active is None:
active = self.active
return self.group... | Rewrite and tokenize the input string *s*.
Args:
s (str): the input string to process
pattern (str, optional): the regular expression pattern on
which to split tokens; defaults to `[ \t]+`
active (optional): a collection of external module names
that may be applied if called
Returns:
a :class:`~delphin.tokens.YyTokenL... | juraj-google-style |
def _process_parameters_section(func_documentation, sig, func, class_name, model_name_lowercase, parent_class, indent_level):
docstring = set_min_indent('Args:\n', indent_level + 4)
undocumented_parameters = []
documented_params = {}
documented_kwargs = {}
if func_documentation is not None:
... | Process the parameters section of the docstring.
Args:
func_documentation (`str`): Existing function documentation (manually specified in the docstring)
sig (`inspect.Signature`): Function signature
func (`function`): Function the parameters belong to
class_name (`str`): Name of the class the function belongs to
model... | github-repos |
def retrieve_token(self, token):
    """Retrieve the details of a specific token.

    Args:
        token: The identifier of the token to fetch.

    Returns:
        The client response for the token resource.
    """
    endpoint = '/tokens/{}'.format(token)
    request_headers = self.client._get_private_headers()
    return self.client._get(self.client.URL_BASE + endpoint,
                            headers=request_headers)
def get_link_flags():
is_mac = _platform.system() == 'Darwin'
ver = _VERSION.split('.')[0]
flags = []
if not _MONOLITHIC_BUILD:
flags.append('-L%s' % get_lib())
if is_mac:
flags.append('-ltensorflow_framework.%s' % ver)
else:
flags.append('-l:libtensorflow... | Returns the linker flags for linking with TensorFlow.
The returned list of arguments can be passed to the linker for linking against
TensorFlow. The result is platform dependent.
For example, on a typical Linux system with Python 3.7 the following command
prints `['-L/usr/local/lib/python3.7/dist-packages/tensorflow'... | github-repos |
def __parse_cmd_args(args, sudo, shell):
if (isinstance(args, tuple) and (len(args) == 1) and isinstance(args[0], tuple)):
args = args[0]
if shell:
if isinstance(args, six.string_types):
pass
elif (isinstance(args, (list, tuple)) and (len(args) > 1)):
args = ' '.j... | When shell is True, Popen will only accept strings. No tuples
Shell really should not be true.
Returns:
args suitable for subprocess.Popen
I'm not quite sure what those are yet. Plain old string seem to work
well? But I remember needing shlex at some point.
CommandLine:
python -m utool.util_cplat --test-__parse_cmd_... | codesearchnet |
def is_valid_assignment(self, mtf_dimension_name, mesh_dimension_name):
    """Whether this MTF dimension may be assigned to this mesh dimension.

    Args:
        mtf_dimension_name: string, the name of a Mesh TensorFlow dimension.
        mesh_dimension_name: string, the name of a mesh dimension.

    Returns:
        A boolean: True iff the MTF dimension is splittable and its size GCD
        is an exact multiple of the mesh dimension's size.
    """
    if mtf_dimension_name not in self._splittable_mtf_dimension_names:
        return False
    size_gcd = self._mtf_dimension_name_to_size_gcd[mtf_dimension_name]
    mesh_size = self._mesh_dimension_name_to_size[mesh_dimension_name]
    return size_gcd % mesh_size == 0
def parse(file_contents, file_name):
    """Try to parse the given contents as a Jinja2 template.

    Args:
        file_contents (str): Contents of a Jinja2 template file.
        file_name (str): Name of the file, used only in the error report.

    Returns:
        str: An empty string when parsing succeeds, otherwise an error
        report describing why the template could not be parsed.
    """
    env = Environment()
    result = ""
    try:
        env.parse(file_contents)
    except Exception as exc:
        # Report the failure as text instead of raising; `as exc` replaces
        # the dated sys.exc_info() idiom with identical output.
        result += "ERROR: Jinja2 Template File: {0}".format(file_name)
        result += repr(exc) + '\n'
    return result
def with_values(self, new_values):
new_values = _convert_to_ragged_tensor_values(new_values)
new_values.shape.with_rank_at_least(1)
self.values.shape[:1].assert_is_compatible_with(new_values.shape[:1])
if isinstance(new_values, RaggedTensor) and self._row_partition.dtype != new_values.row_splits.dtype:
... | Returns a copy of `self` with `values` replaced by `new_value`.
Preserves cached row-partitioning tensors such as `self.cached_nrows` and
`self.cached_value_rowids` if they have values.
Args:
new_values: Potentially ragged tensor to use as the `values` for the
returned `RaggedTensor`. Must have `rank > 0`, and must ... | github-repos |
def MakePmfFromItems(t, name=''):
    """Build a normalized PMF from value-probability pairs.

    Args:
        t: sequence of (value, probability) pairs.
        name: string name for this PMF.

    Returns:
        Pmf object.
    """
    result = Pmf(dict(t), name)
    result.Normalize()
    return result
def locked_put(self, credentials):
    """Store the credentials in the system keyring (not a file).

    Serializes the credentials to JSON and saves them under this store's
    service name and user name via the ``keyring`` backend.

    Args:
        credentials: Credentials, the credentials to store.
    """
    keyring.set_password(self._service_name, self._user_name,
                         credentials.to_json())
def write(gctoo, out_fname, data_null='NaN', metadata_null='-666', filler_null='-666', data_float_format='%.4f'):
if (not out_fname.endswith('.gct')):
out_fname += '.gct'
f = open(out_fname, 'w')
dims = [str(gctoo.data_df.shape[0]), str(gctoo.data_df.shape[1]), str(gctoo.row_metadata_df.shape[1]), s... | Write a gctoo object to a gct file.
Args:
gctoo (gctoo object)
out_fname (string): filename for output gct file
data_null (string): how to represent missing values in the data (default = "NaN")
metadata_null (string): how to represent missing values in the metadata (default = "-666")
filler_null (string): what value t... | codesearchnet |
def get_app_hostname():
if ((not is_running_on_app_engine()) or is_running_on_localhost()):
return None
app_id = app_identity.get_application_id()
prefix = get_hostname_prefix()
suffix = 'appspot.com'
if (':' in app_id):
tokens = app_id.split(':')
api_name = tokens[1]
... | Return hostname of a running Endpoints service.
Returns hostname of an running Endpoints API. It can be 1) "localhost:PORT"
if running on development server, or 2) "app_id.appspot.com" if running on
external app engine prod, or "app_id.googleplex.com" if running as Google
first-party Endpoints API, or 4) None if not r... | codesearchnet |
def info(name):
try:
handle_scm = win32service.OpenSCManager(None, None, win32service.SC_MANAGER_CONNECT)
except pywintypes.error as exc:
raise CommandExecutionError('Failed to connect to the SCM: {0}'.format(exc.strerror))
try:
handle_svc = win32service.OpenService(handle_scm, name,... | Get information about a service on the system
Args:
name (str): The name of the service. This is not the display name. Use
``get_service_name`` to find the service name.
Returns:
dict: A dictionary containing information about the service.
CLI Example:
.. code-block:: bash
salt '*' service.info spooler | codesearchnet |
def stop_instance(self):
    """Stop the instance for this Streaming Analytics service.

    Returns:
        dict: JSON response of the instance stop operation.
    """
    url = self._get_url('stop_path')
    response = self.rest_client.session.put(url, json={})
    _handle_http_errors(response)
    return response.json()
def sync_executors(self):
    """Sync both local executors and the ones on remote workers.

    In async execution mode, local calls can return before the matching
    remote op/function requests finish; this call acts as a barrier and
    returns only once all pending remote nodes are done.

    Raises:
        ValueError: if the context has not been initialized.
    """
    if not self._context_handle:
        raise ValueError('Context is not initialized.')
    pywrap_tfe.TFE_ContextSyncExecutors(self._context_handle)
def back_propagation(self, delta_arr):
re_encoder_delta_arr, delta_hidden_arr, re_encoder_grads_list = self.__retrospective_encoder.hidden_back_propagate(
delta_arr[:, -1]
)
re_encoder_grads_list.insert(0, None)
re_encoder_grads_list.insert(0, None)
observed... | Back propagation.
Args:
delta_output_arr: Delta.
Returns:
Tuple data.
- decoder's `list` of gradations,
- encoder's `np.ndarray` of Delta,
- encoder's `list` of gradations. | juraj-google-style |
def recombine(self, parents: List[pg.DNA], global_state: pg.geno.AttributeDict, step: int) -> List[pg.DNA]: | Generate a list of child DNA based on the list of parents given.
User should override this method with optional keyword arguments
'global_state' and 'step'.
The parents DNA contains a metadata field 'generation', which is the
generation of the parent DNA. If the Recombinator does not assign this
field for the new chi... | github-repos |
def console_get_width(con: tcod.console.Console) -> int:
    """Return the width of a console.

    Args:
        con (Console): Any Console instance.

    Returns:
        int: The width of the console.

    .. deprecated:: 2.0
        Use `Console.width` instead.
    """
    width = lib.TCOD_console_get_width(_console(con))
    return int(width)
def get(account_id, account_type_id=None):
    """Return an account by ID (or name) and optional type.

    Args:
        account_id (`int`, `str`): Unique account identifier; a string is
            treated as the account name, anything else as the numeric id.
        account_type_id (str): Optional type of account to filter on.

    Returns:
        :obj:`Account`: An Account object if found, else None.
    """
    # isinstance (rather than type(...) == str) also accepts str subclasses.
    if isinstance(account_id, str):
        args = {'account_name': account_id}
    else:
        args = {'account_id': account_id}
    if account_type_id:
        args['account_type_id'] = account_type_id
    return db.Account.find_one(**args)
def __init__(self, scaffold=None, master='', config=None, checkpoint_dir=None, checkpoint_filename_with_path=None):
self._checkpoint_dir = checkpoint_dir
self._checkpoint_filename_with_path = checkpoint_filename_with_path
self._scaffold = scaffold or Scaffold()
self._session_manager = None
self._mas... | Initializes a chief session creator.
Args:
scaffold: A `Scaffold` used for gathering or building supportive ops. If
not specified a default one is created. It's used to finalize the graph.
master: `String` representation of the TensorFlow master to use.
config: `ConfigProto` proto used to configure the session.
checkp... | github-repos |
def can_user_access_build(param_name):
build_id = (request.args.get(param_name, type=int) or request.form.get(param_name, type=int) or request.json[param_name])
if (not build_id):
logging.debug('Build ID in param_name=%r was missing', param_name)
abort(400)
ops = operations.UserOps(current_u... | Determines if the current user can access the build ID in the request.
Args:
param_name: Parameter name to use for getting the build ID from the
request. Will fetch from GET or POST requests.
Returns:
The build the user has access to. | codesearchnet |
def __init__(self, filenames, index=0, buffer_size=None, _account_id=None,
delimiter=None):
self._filenames = filenames
self._index = index
self._buffer_size = buffer_size
self._account_id = _account_id
self._delimiter = delimiter
self._bucket = None
self._bucket_iter = N... | Initialize a GoogleCloudStorageInputReader instance.
Args:
filenames: A list of Google Cloud Storage filenames of the form
'/bucket/objectname'.
index: Index of the next filename to read.
buffer_size: The size of the read buffer, None to use default.
_account_id: Internal use only. See cloudstorage documentation.
deli... | juraj-google-style |
def _StopStyleSelectionMethod(self, doc):
if (not self.show_stop_hierarchy):
return (lambda stop: (None, None))
self._CreateStyle(doc, 'stop_entrance', {'IconStyle': {'color': 'ff0000ff'}})
self._CreateStyle(doc, 'entrance_connection', {'LineStyle': {'color': 'ff0000ff', 'width': '2'}})
self._Cr... | Create a method to determine which style to apply to a stop placemark.
Args:
doc: the KML document.
Returns:
A function that should accept a Stop argument and return a tuple of
(stop placemark style id, pathway placemark style id). Either style id
can be None, indicating no style should be set.
Given a Stop, we nee... | codesearchnet |
def _ReadCompressedData(self, read_size):
    """Read compressed data from the underlying file-like object.

    Args:
        read_size (int): number of bytes of compressed data to read.
    """
    data = self._zip_ext_file.read(read_size)
    self._uncompressed_data = data
    self._uncompressed_data_size = len(data)
def to_json(value: Any, **kwargs) -> Any:
if isinstance(value, (type(None), bool, int, float, str)):
v = value
elif isinstance(value, JSONConvertible):
v = value.to_json(**kwargs)
elif isinstance(value, tuple):
v = [JSONConvertible.TUPLE_MARKER] + to_json(list(value), **kwargs)
e... | Serializes a (maybe) JSONConvertible value into a plain Python object.
Args:
value: value to serialize. Applicable value types are:
* Builtin python types: None, bool, int, float, string;
* JSONConvertible types;
* List types;
* Tuple types;
* Dict types.
**kwargs: Keyword arguments to pass to value.to_json if value... | github-repos |
def to_python(self):
    """Deconstruct this ``Constraint`` instance into a tuple.

    Returns:
        tuple: (selector, comparison, argument), with the comparison
        translated through COMPARISON_MAP when a mapping exists.
    """
    comparison = COMPARISON_MAP.get(self.comparison, self.comparison)
    return (self.selector, comparison, self.argument)
def _parse_username(self, config):
(username, priv, role, nopass, fmt, secret, sshkey) = config
resource = dict()
resource['privilege'] = priv
resource['role'] = role
resource['nopassword'] = nopass == 'nopassword'
resource['format'] = fmt
resource['secre... | Scans the config block and returns the username as a dict
Args:
config (str): The config block to parse
Returns:
dict: A resource dict that is intended to be merged into the
user resource | juraj-google-style |
def AddBackpropAccumulatedValue(self, history_value, value, dead_branch=False):
history_ctxt = history_value.op._get_control_flow_context()
cond_ctxt = None
value_ctxt = value.op._get_control_flow_context()
while value_ctxt and value_ctxt != history_ctxt:
if isinstance(value_ctxt, control_flow_o... | Add the getter for an accumulated value in the grad context.
This is added to the backprop loop. Called in the grad context to
get the value of an accumulated value. The stack pop op must be guarded
by the pred of the controlling cond.
Args:
history_value: The history (a stack) of a value.
value: The value that is pu... | github-repos |
def create(self, resource):
    """Create the given resource.

    Args:
        resource (intern.resource.boss.BossResource): Data model object with
            attributes matching those of the resource to create.

    Returns:
        (intern.resource.boss.BossResource): Resource of the requested type.

    Raises:
        requests.HTTPError on failure.
    """
    return self.service.create(
        resource, self.url_prefix, self.auth, self.session,
        self.session_send_opts)
def cancel(self, subscription_id, data={}, **kwargs):
    """Cancel the subscription given by subscription_id.

    Args:
        subscription_id: Id of the subscription to cancel.
        data: Optional request payload.

    Returns:
        Subscription dict for the given subscription id.
    """
    # NOTE(review): the mutable default `data={}` is preserved for interface
    # compatibility; it is assumed post_url does not mutate it — confirm.
    cancel_url = '{}/{}/cancel'.format(self.base_url, subscription_id)
    return self.post_url(cancel_url, data, **kwargs)
def VerifyServerControlResponse(self, http_object):
if (http_object.code != 200):
return False
try:
(http_object.messages, http_object.source, http_object.nonce) = self.communicator.DecryptMessage(http_object.data)
return True
except communicator.DecodingError as e:
logging.i... | Verify the server response to a 'control' endpoint POST message.
We consider the message correct if and only if we can decrypt it
properly. Note that in practice we can not use the HTTP status to figure out
if the request worked because captive proxies have a habit of lying and
returning a HTTP success code even when ... | codesearchnet |
def greedy_decode(logits_fn, initial_ids, temperature=0.0, initial_states=None, eos_id=EOS_ID, forced_ids=None, use_tpu=True):
length_dim = initial_ids.shape.dims[(- 1)]
mesh = initial_ids.mesh
num_steps = mtf.constant(mesh, length_dim.size, dtype=tf.int32)
def cond_fn(step_num, prev_ids, *unused_state... | Greedy decoding.
Args:
logits_fn: Interface to the model, to provide logits.
Shoud take:
step_num - mtf Scalar
ids - mtf Tensor with shape [..., length]
states - list of mtf.Tensor
Should return:
logits - [batch, vocab_size]
new_states - list of mtf.Tensor
initial_ids: mtf.Tensor with shape [..., length], containing z... | codesearchnet |
def createEmails(nicks=None, nicksFile=None):
candidate_emails = set()
if nicks != None:
for n in nicks:
for e in email_providers.domains:
candidate_emails.add("{}@{}".format(n, e))
elif nicksFile != None:
with open(nicksFile, "r") as iF:
nicks = ... | Method that globally permits to generate the emails to be checked.
Args:
-----
nicks: List of aliases.
nicksFile: The filepath to the aliases file.
Returns:
--------
list: list of emails to be checked. | juraj-google-style |
def share(self, name, item):
    """Share an object via the telepath protocol.

    Args:
        name (str): Name of the shared object.
        item (object): The object to share over telepath.
    """
    try:
        # Telepath-aware objects get notified before being exposed.
        if isinstance(item, s_telepath.Aware):
            item.onTeleShare(self, name)
        self.shared[name] = item
    except Exception:
        # Best-effort: log the failure instead of propagating it.
        logger.exception(f'onTeleShare() error for: {name}')
def encoder(self, inputs, n_layers=3):
latent_dims = self.hparams.z_dim
shape_as_list = inputs.shape.as_list()
if len(shape_as_list) != 5:
raise ValueError("Expected inputs to be a 5-D, got %d" %
len(shape_as_list))
if inputs.dtype != tf.float32:
raise ValueError... | Convnet that encodes inputs into mean and std of a gaussian.
Args:
inputs: 5-D Tensor, shape (batch_size, num_frames, width, height, channels)
n_layers: Number of layers.
Returns:
z_mu: Mean of the latent gaussians.
z_log_var: log(var) of the latent gaussians.
Raises:
ValueError: If inputs is not a 5-D tensor or not... | juraj-google-style |
def pdb_downloader_and_metadata(self, outdir=None, pdb_file_type=None, force_rerun=False):
if not pdb_file_type:
pdb_file_type = self.pdb_file_type
counter = 0
for g in tqdm(self.genes):
pdbs = g.protein.pdb_downloader_and_metadata(outdir=outdir, pdb_file_type=... | Download ALL mapped experimental structures to each protein's structures directory.
Args:
outdir (str): Path to output directory, if GEM-PRO directories were not set or other output directory is
desired
pdb_file_type (str): Type of PDB file to download, if not already set or other format is desired
force_rerun (bool):... | juraj-google-style |
def StartsWith(self, value):
    """Set the type of the WHERE clause to "starts with".

    Args:
        value: The value to be used in the WHERE condition.

    Returns:
        The query builder that this WHERE builder links to.
    """
    condition = self._CreateSingleValueCondition(value, 'STARTS_WITH')
    self._awql = condition
    return self._query_builder
def write_float(self, value, little_endian=True):
    """Pack the value as a float and write 4 bytes to the stream.

    Args:
        value (number): the value to write to the stream.
        little_endian (bool): endianness; little endian by default.

    Returns:
        int: the number of bytes written.
    """
    endian = "<" if little_endian else ">"
    return self.pack(endian + "f", value)
def get_params(img, output_size):
    """Sample parameters for a random crop of ``img``.

    Args:
        img: Array-like image; only the first two entries of ``.shape``
            are consulted.
        output_size (tuple): Expected output size of the crop.

    Returns:
        tuple: params (i, j, th, tw) to be passed to ``crop``.
    """
    th, tw = output_size
    w, h = img.shape[0], img.shape[1]
    # Crop size equals image size: nothing random to choose.
    if w == tw and h == th:
        return 0, 0, h, w
    i = random.randint(0, h - th)
    j = random.randint(0, w - tw)
    return i, j, th, tw
def _GetInstanceConfig(self):
try:
instance_data = self.metadata_dict['instance']['attributes']
except KeyError:
instance_data = {}
self.logger.warning('Instance attributes were not found.')
try:
project_data = self.metadata_dict['project']['attributes']
except KeyError:
... | Get the instance configuration specified in metadata.
Returns:
string, the instance configuration data. | codesearchnet |
def compute_output_shape(self, input_shape):
if context.executing_eagerly():
self._maybe_build(input_shape)
with func_graph.FuncGraph(str(self.name) + '_scratch_graph').as_default():
input_shape = tf_utils.convert_shapes(input_shape, to_tuples=False)
def _make_placeholder_li... | Computes the output shape of the layer.
If the layer has not been built, this method will call `build` on the
layer. This assumes that the layer will later be used with inputs that
match the input shape provided here.
Args:
input_shape: Shape tuple (tuple of integers)
or list of shape tuples (one per output tensor of... | github-repos |
def check_managed_pipeline(name='', app_name=''):
*pipeline_name_prefix, bracket_region = name.split()
region = bracket_region.strip('[]')
not_managed_message = '"{0}" is not managed.'.format(name)
if 'onetime' in region:
LOG.info('"%s" is a onetime, marked for cleaning.', name)
r... | Check a Pipeline name is a managed format **app_name [region]**.
Args:
name (str): Name of Pipeline to check.
app_name (str): Name of Application to find in Pipeline name.
Returns:
str: Region name from managed Pipeline name.
Raises:
ValueError: Pipeline is not managed. | juraj-google-style |
def _create_inbound_stream(self, config=None):
if config is None:
raise ValueError('No stream config to create stream from.')
name = self._get_stream_name(config)
stream_handlers = self._get_stream_handlers(config, name)
stream_input = config.get('input', None)
... | Creates an inbound stream from its config.
Params:
config: stream configuration as read by ait.config
Returns:
stream: a Stream
Raises:
ValueError: if any of the required config values are missing | juraj-google-style |
def list_media_services_rg(access_token, subscription_id, rgname):
endpoint = ''.join([get_rm_endpoint(), '/subscriptions/', subscription_id, '/resourceGroups/', rgname, '/providers/microsoft.media/mediaservices?api-version=', MEDIA_API])
return do_get(endpoint, access_token) | List the media services in a resource group.
Args:
access_token (str): A valid Azure authentication token.
subscription_id (str): Azure subscription id.
rgname (str): Azure resource group name.
Returns:
HTTP response. JSON body. | codesearchnet |
def create_media_assetfile(access_token, parent_asset_id, name, is_primary='false', is_encrypted='false', encryption_scheme='None', encryptionkey_id='None'):
path = '/Files'
endpoint = ''.join([ams_rest_endpoint, path])
if (encryption_scheme == 'StorageEncryption'):
body = (((((((((((((('{ \t\t\t"Is... | Create Media Service Asset File.
Args:
access_token (str): A valid Azure authentication token.
parent_asset_id (str): Media Service Parent Asset ID.
name (str): Media Service Asset Name.
is_primary (str): Media Service Primary Flag.
is_encrypted (str): Media Service Encryption Flag.
encryption_scheme (str): Media Serv... | codesearchnet |
def update_dict_recursive(editable_dict: dict, editing_dict: dict) -> None:
    """Apply ``editing_dict`` onto ``editable_dict``, merging nested mappings.

    Use this instead of ``dict.update`` when edits are nested more than one
    level deep: mapping values are merged recursively rather than replaced.

    Args:
        editable_dict: dictionary that will be edited in place.
        editing_dict: dictionary that contains the edits.

    Returns:
        None
    """
    for key, value in editing_dict.items():
        if isinstance(value, collections.abc.Mapping):
            # collections.abc.Mapping: the bare collections.Mapping alias was
            # removed in Python 3.10. setdefault (not get) keeps the subtree
            # attached to editable_dict, so recursive edits are not silently
            # lost when the key is missing.
            update_dict_recursive(editable_dict.setdefault(key, {}), value)
        else:
            editable_dict[key] = value
def panel(self, panel_id):
    """Fetch a gene panel by '_id'.

    Args:
        panel_id (str, ObjectId): the panel document's ObjectId, or its
            string form (converted before the lookup).

    Returns:
        dict: panel object, or `None` if no panel matches.
    """
    if not isinstance(panel_id, ObjectId):
        panel_id = ObjectId(panel_id)
    return self.panel_collection.find_one({'_id': panel_id})
def BuildFindSpecs(self, artifact_filter_names, environment_variables=None):
find_specs = []
for name in artifact_filter_names:
definition = self._artifacts_registry.GetDefinitionByName(name)
if not definition:
logger.debug('undefined artifact definition: {0:s}'.format(name))
co... | Builds find specifications from artifact definitions.
Args:
artifact_filter_names (list[str]): names of artifact definitions that are
used for filtering file system and Windows Registry key paths.
environment_variables (Optional[list[EnvironmentVariableArtifact]]):
environment variables. | juraj-google-style |
def phase_uniquizer(all_phases):
measurement_name_maker = UniqueNameMaker(itertools.chain.from_iterable((phase.measurements.keys() for phase in all_phases if phase.measurements)))
attachment_names = list(itertools.chain.from_iterable((phase.attachments.keys() for phase in all_phases)))
attachment_names.exte... | Makes the names of phase measurement and attachments unique.
This function will make the names of measurements and attachments unique.
It modifies the input all_phases.
Args:
all_phases: the phases to make unique
Returns:
the phases now modified. | codesearchnet |
def install_js():
target_jsdir = join(SERVER, 'static', 'js')
target_cssdir = join(SERVER, 'static', 'css')
target_tslibdir = join(SERVER, 'static', 'lib')
STATIC_ASSETS = [join(JS, 'bokeh.js'), join(JS, 'bokeh.min.js'), join(CSS, 'bokeh.css'), join(CSS, 'bokeh.min.css')]
if (not all((exists(a) for ... | Copy built BokehJS files into the Python source tree.
Returns:
None | codesearchnet |
def __init__(self, **kwargs):
    """Distance unit parameter.

    Args:
        - **unitname**: A pycrs.elements.units.UnitName instance with the
            name given by each supported format.
        - **unitmultiplier**: A pycrs.elements.units.UnitMultiplier instance.
    """
    # Fall back to the existing (class-level) defaults when not supplied.
    for attr in ('unitname', 'unitmultiplier'):
        setattr(self, attr, kwargs.get(attr, getattr(self, attr)))
def set_lock_state(self, code, device_label, state):
response = None
try:
response = requests.put(
urls.set_lockstate(self._giid, device_label, state),
headers={
'Accept': 'application/json, text/javascript, */*; q=0.01',
... | Lock or unlock
Args:
code (str): Lock code
device_label (str): device label of lock
state (str): 'lock' or 'unlock' | juraj-google-style |
def print_layer_summary(layer):
try:
output_shape = layer.output_shape
except AttributeError:
output_shape = 'multiple'
except RuntimeError:
output_shape = '?'
name = layer.name
cls_name = layer.__class__.__name__
if not layer.built and (not getattr(layer, '_is_graph_netw... | Prints a summary for a single layer.
Args:
layer: target layer. | github-repos |
def init_cache(self, batch_size, max_length, encoder_outputs):
decoder_input_ids = jnp.ones((batch_size, max_length), dtype='i4')
decoder_attention_mask = jnp.ones_like(decoder_input_ids)
decoder_position_ids = jnp.broadcast_to(jnp.arange(jnp.atleast_2d(decoder_input_ids).shape[-1]), decoder_input_ids.shape... | Args:
batch_size (`int`):
batch_size used for fast auto-regressive decoding. Defines the batch size of the initialized cache.
max_length (`int`):
maximum possible length for auto-regressive decoding. Defines the sequence length of the initialized
cache.
encoder_outputs (`Union[FlaxBaseModelOutput, tuple(tuple(jnp.ndarr... | github-repos |
def all_events_filter(
        self,
        from_block: BlockSpecification = GENESIS_BLOCK_NUMBER,
        to_block: BlockSpecification = 'latest',
) -> StatelessFilter:
    """Install a filter for all events emitted by this token network contract.

    Args:
        from_block: First block covered by the filter (default: genesis).
        to_block: Last block covered by the filter (default: 'latest').

    Return:
        The filter instance.
    """
    # A topics argument of None matches every event of the contract.
    return self.events_filter(None, from_block, to_block)
def _get_operation_input_field_values(self, metadata, file_input):
input_args = metadata['request']['ephemeralPipeline']['inputParameters']
vals_dict = metadata['request']['pipelineArgs']['inputs']
names = [
arg['name'] for arg in input_args if ('localCopy' in arg) == file... | Returns a dictionary of envs or file inputs for an operation.
Args:
metadata: operation metadata field
file_input: True to return a dict of file inputs, False to return envs.
Returns:
A dictionary of input field name value pairs | juraj-google-style |
def _PrintAnalysisStatusHeader(self, processing_status):
self._output_writer.Write('Storage file\t\t: {0:s}\n'.format(self._storage_file_path))
self._PrintProcessingTime(processing_status)
if (processing_status and processing_status.events_status):
self._PrintEventsStatus(processing_status.events_st... | Prints the analysis status header.
Args:
processing_status (ProcessingStatus): processing status. | codesearchnet |
def accuracy(y_true: [list, np.ndarray], y_predicted: [list, np.ndarray]) -> float:
    """Calculate accuracy as the fraction of exactly matching predictions.

    Args:
        y_true: array of true values.
        y_predicted: array of predicted values.

    Returns:
        Fraction of positions where the prediction equals the true value,
        or 0 if the input is empty.
    """
    examples_len = len(y_true)
    # Guard clause for empty input avoids a ZeroDivisionError below.
    if not examples_len:
        return 0
    # Generator expression avoids materializing a throwaway list of booleans.
    correct = sum(y1 == y2 for y1, y2 in zip(y_true, y_predicted))
    return correct / examples_len
Args:
y_true: array of true values
y_predicted: array of predicted values
Returns:
fraction of samples whose predicted value exactly matches the true value
def diff(self) -> List[str]:
    """Return the tracked keys that have not been seen/updated yet.

    Computes the set difference between the keys of the tracked state dict
    (``self.to_track``) and the keys recorded as seen so far (``self._seen``).
    This is an effective way to check whether all keys have been updated.

    Returns:
        The keys not yet updated. NOTE(review): despite the ``List[str]``
        annotation, the expression below evaluates to a ``set`` — confirm
        whether callers rely on list semantics before changing either side.
    """
    return set(self.to_track.keys()) - self._seen
This is an effective method to check if we have update all the keys
Returns:
List[str]: List of keys not yet updated | github-repos |
def draw_point(self, x, y):
    """Draw a single point on the current rendering target.

    Args:
        x (int): The x coordinate of the point.
        y (int): The y coordinate of the point.

    Raises:
        SDLError: If the underlying SDL call reports an error.
    """
    status = lib.SDL_RenderDrawPoint(self._ptr, x, y)
    check_int_err(status)
Args:
x (int): The x coordinate of the point.
y (int): The y coordinate of the point.
Raises:
SDLError: If an error is encountered. | codesearchnet |
def calculate_subscription_lifecycle(subscription_id):
subscription = Subscription.objects.select_related("messageset", "schedule").get(
id=subscription_id
)
behind = subscription.messages_behind()
if behind == 0:
return
current_messageset = subscription.messageset
current_... | Calculates the expected lifecycle position the subscription in
subscription_ids, and creates a BehindSubscription entry for them.
Args:
subscription_id (str): ID of subscription to calculate lifecycle for | juraj-google-style |
async def leave_conversation(self, conv_id):
    """Leave a conversation and drop it from the local registry.

    Args:
        conv_id (str): ID of the conversation to leave.
    """
    logger.info('Leaving conversation: {}'.format(conv_id))
    conversation = self._conv_dict[conv_id]
    await conversation.leave()
    del self._conv_dict[conv_id]
Args:
conv_id (str): ID of conversation to leave. | juraj-google-style |
def _MeanAggregator(inputs, segments):
result = []
for inputs_i, segments_i in zip(array_ops.split(inputs, inputs.shape[0]), array_ops.split(segments, segments.shape[0])):
means_i = math_ops.unsorted_segment_mean(inputs_i, segments_i, num_segments=math_ops.reduce_max(segments_i) + 1)
result.appe... | Replaces each segment with its mean along the last axis.
Specifically, each value in the `inputs` tensor gets replaced by the mean
value computed from the values that belong to the same segment.
Args:
inputs: A 2-tensor. Aggregation is done over dimension 1.
segments: A 2-tensor, same shape as `inputs`.
Returns:
The ... | github-repos |
def __process_instr(self, instr, avoid, next_addr, initial_state, execution_state, trace_current):
if instr.mnemonic == ReilMnemonic.JCC:
not_taken_addr = next_addr
address, index = split_address(instr.address)
logger.debug("[+] Processing branch: {:
... | Process a REIL instruction.
Args:
instr (ReilInstruction): Instruction to process.
avoid (list): List of addresses to avoid while executing the code.
next_addr (int): Address of the following instruction.
initial_state (State): Initial execution state.
execution_state (Queue): Queue of execution states.
trace_current ... | juraj-google-style |
def _op_in_graph_mode(tensor):
    """Return the tensor's op in graph mode, or the tensor itself in eager mode.

    This is useful because sometimes an op is needed in graph mode instead of
    a tensor; in eager mode there are no ops.

    Args:
        tensor: A tensor.

    Returns:
        The tensor's op in graph mode; the tensor unchanged in eager mode.
    """
    return tensor if context.executing_eagerly() else tensor.op
This is useful because sometimes an op is needed in graph mode instead of a
tensor. In eager mode, there are no ops.
Args:
tensor: A tensor.
Returns:
The tensor's op in graph mode. The tensor in eager mode. | github-repos |
def download(url, output_file=None, open_file=True, allow_overwrite=False):
filename = url.split('/')[-1]
if output_file is None:
cache = os.path.join(get_data_home(), filename)
else:
cache = output_file
if os.path.exists(cache) and not allow_overwrite:
logger.info("> {} alr... | Download a file from URL.
Args:
url (str): URL.
output_file (str, optional): If given, the downloaded file is written to the given path.
open_file (bool): If True, it returns an opened file stream of the downloaded file.
allow_overwrite (bool): If True, it overwrites an existing file.
Returns:
Returns file object if ... | juraj-google-style |
def on_click(self, handler):
    """Set up *handler* for both button clicks and menu item clicks.

    Args:
        handler (func): handler function to call when the button is activated.

    Returns:
        None
    """
    # Both event types count as "clicks" for a button widget.
    for event_class in (ButtonClick, MenuItemClick):
        self.on_event(event_class, handler)
Args:
handler (func) : handler function to call when button is activated.
Returns:
None | codesearchnet |
def delete_metadata(self, resource, keys):
    """Delete the given key-value pairs associated with *resource*.

    Args:
        resource (intern.resource.boss.BossResource): resource whose metadata
            entries should be removed.
        keys (list): metadata keys to delete.

    Raises:
        HTTPErrorList on failure (raised by the underlying service).
    """
    service = self.metadata_service
    service.set_auth(self._token_metadata)
    service.delete(resource, keys)
Will attempt to delete all key-value pairs even if some fail.
Args:
resource (intern.resource.boss.BossResource)
keys (list)
Raises:
HTTPErrorList on failure. | juraj-google-style |
def resample(self, size, interpolation=gdalconst.GRA_NearestNeighbour):
factors = (size[0] / float(self.RasterXSize),
size[1] / float(self.RasterYSize))
affine = AffineTransform(*tuple(self.affine))
affine.scale = (affine.scale[0] / factors[0],
... | Returns a new instance resampled to provided size.
Arguments:
size -- tuple of x,y image dimensions | juraj-google-style |
def subset_gctoo(gctoo, row_bool=None, col_bool=None, rid=None, cid=None, ridx=None, cidx=None, exclude_rid=None, exclude_cid=None):
assert (sum([(rid is not None), (row_bool is not None), (ridx is not None)]) <= 1), 'Only one of rid, row_bool, and ridx can be provided.'
assert (sum([(cid is not None), (col_boo... | Extract a subset of data from a GCToo object in a variety of ways.
The order of rows and columns will be preserved.
Args:
gctoo (GCToo object)
row_bool (list of bools): length must equal gctoo.data_df.shape[0]
col_bool (list of bools): length must equal gctoo.data_df.shape[1]
rid (list of strings): rids to include
cid... | codesearchnet |
def reset_port_protection(self, id_or_uri, timeout=(- 1)):
    """Trigger a reset of port protection on the logical interconnect.

    Args:
        id_or_uri: Either the interconnect id or the interconnect uri.
        timeout: Timeout in seconds; -1 (the default) waits for task completion.

    Returns:
        The result of the update call.
    """
    base_uri = self._client.build_uri(id_or_uri)
    return self._client.update_with_zero_body(base_uri + '/resetportprotection', timeout)
Cause port protection to be reset on all the interconnects of the logical interconnect that matches ID.
Args:
id_or_uri: Can be either the interconnect id or the interconnect uri.
timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the oper... | codesearchnet |
def add_gene_info(self, variant_obj, gene_panels=None):
gene_panels = (gene_panels or [])
variant_obj['has_refseq'] = False
extra_info = {}
for panel_obj in gene_panels:
for gene_info in panel_obj['genes']:
hgnc_id = gene_info['hgnc_id']
if (hgnc_id not in extra_info):
... | Add extra information about genes from gene panels
Args:
variant_obj(dict): A variant from the database
gene_panels(list(dict)): List of panels from database | codesearchnet |
def calculate_3D_elastic_energy(self, film, match, elasticity_tensor=None, include_strain=False):
if (elasticity_tensor is None):
return 9999
struc = SlabGenerator(self.film, match['film_miller'], 20, 15, primitive=False).get_slab().oriented_unit_cell
film_matrix = list(match['film_sl_vecs'])
fi... | Calculates the multi-plane elastic energy. Returns 999 if no elastic
tensor was given on init
Args:
film(Structure): conventional standard structure for the film
match(dictionary) : match dictionary from substrate analyzer
elasticity_tensor(ElasticTensor): elasticity tensor for the film
include_strain(bool): include s... | codesearchnet |
def _fetch_certs(request, certs_url):
    """Fetch certificates from *certs_url* and decode the JSON payload.

    Args:
        request (google.auth.transport.Request): The object used to make
            HTTP requests.
        certs_url (str): The certificate endpoint URL.

    Returns:
        Mapping[str, str]: A mapping of public key ID to x.509 certificate.

    Raises:
        exceptions.TransportError: If the endpoint does not return HTTP 200.
    """
    response = request(certs_url, method='GET')
    if response.status == http_client.OK:
        return json.loads(response.data.decode('utf-8'))
    raise exceptions.TransportError('Could not fetch certificates at {}'.format(certs_url))
Google-style certificate endpoints return JSON in the format of
``{'key id': 'x509 certificate'}``.
Args:
request (google.auth.transport.Request): The object used to make
HTTP requests.
certs_url (str): The certificate endpoint URL.
Returns:
Mapping[str, str]: A mapping of public key ID to x.509... | codesearchnet |
def get_collectors(self, limit=1000, offset=0):
options = {'limit': limit, 'offset': offset}
request = requests.get(self.url, params=options, auth=self.auth)
try:
results = request.json()['collectors']
except KeyError:
results = request.json()
except json.decoder.JSONDecodeError:
... | Returns a dict of collectors.
Args:
limit (int): number of collectors to return
offset (int): the offset of where the list of collectors should begin from | codesearchnet |
def draw_layer(ax, layer):
    """Draw a single layer on the given matplotlib axis.

    Args:
        ax (axis): the matplotlib axis to draw on.
        layer (layer): the layer to plot; its items are unpacked as the
            positional arguments of ``ax.plot`` — presumably coordinate
            sequences (x values, y values); TODO confirm against callers.
    """
    # Equal aspect ratio keeps geometry undistorted; 'datalim' adjusts the
    # data limits (rather than the axes box) to achieve it.
    ax.set_aspect('equal', 'datalim')
    ax.plot(*layer)
    # Hide axis lines, ticks and labels for a clean rendering.
    ax.axis('off')
Args:
ax (axis): the matplotlib axis to draw on
layer (layer): the layers to plot | codesearchnet |
def _print_results(file, status):
file_color = c.Fore.GREEN
status_color = c.Fore.RED
if (status == 'Success'):
status_color = c.Fore.GREEN
elif (status == 'Skipped'):
status_color = c.Fore.YELLOW
print('{}{!s:<13}{}{!s:<35}{}{!s:<8}{}{}'.format(c.Fore.CYAN, 'Downloading:', file_colo... | Print the download results.
Args:
file (str): The filename.
status (str): The file download status. | codesearchnet |
def __init__(self, additional_note='', kwargs_dict=None):
self._additional_note = additional_note
if kwargs_dict:
bullets = []
for key in sorted(kwargs_dict.keys()):
value = kwargs_dict[key]
if any((x.isspace() for x in key)):
raise ValueError('Parameter n... | Initializes the AppendDocstring object.
Args:
additional_note: Python string added as additional docstring to public
version of function.
kwargs_dict: Python string/string dictionary representing specific kwargs
expanded from the **kwargs input.
Raises:
ValueError: if kwargs_dict.key contains whitespace.
ValueError: ... | github-repos |
def bidiagonalize_real_matrix_pair_with_symmetric_products(mat1: np.ndarray, mat2: np.ndarray, *, rtol: float=1e-05, atol: float=1e-08, check_preconditions: bool=True) -> Tuple[(np.ndarray, np.ndarray)]:
if check_preconditions:
if np.any((np.imag(mat1) != 0)):
raise ValueError('mat1 must be real... | Finds orthogonal matrices that diagonalize both mat1 and mat2.
Requires mat1 and mat2 to be real.
Requires mat1.T @ mat2 to be symmetric.
Requires mat1 @ mat2.T to be symmetric.
Args:
mat1: One of the real matrices.
mat2: The other real matrix.
rtol: Relative numeric error threshold.
atol: Absolute numeric error thre... | codesearchnet |
def get_config():
profiles = {}
curr = None
cmd = ['netsh', 'advfirewall', 'show', 'allprofiles']
ret = __salt__['cmd.run_all'](cmd, python_shell=False, ignore_retcode=True)
if (ret['retcode'] != 0):
raise CommandExecutionError(ret['stdout'])
for line in ret['stdout'].splitlines():
... | Get the status of all the firewall profiles
Returns:
dict: A dictionary of all profiles on the system
Raises:
CommandExecutionError: If the command fails
CLI Example:
.. code-block:: bash
salt '*' firewall.get_config | codesearchnet |
def _GetBetweenQEqualsAndAmpersand(self, url):
(_, _, url) = url.partition('?')
(_, _, url) = url.partition('q=')
if (not url):
return ''
(url, _, _) = url.partition('&')
return url | Retrieves the substring between the substrings 'q=' and '&'.
Args:
url (str): URL.
Returns:
str: search query, the value between 'q=' and '&' or None if no query
was found. | codesearchnet |
def recursive_copy(src_dir, dest_dir):
file_io.recursive_create_dir(dest_dir)
for file_name in file_io.list_directory(src_dir):
old_path = os.path.join(src_dir, file_name)
new_path = os.path.join(dest_dir, file_name)
if file_io.is_directory(old_path):
recursive_copy(old_path, new_path)
el... | Copy the contents of src_dir into the folder dest_dir.
Args:
src_dir: gsc or local path.
dest_dir: gcs or local path. | juraj-google-style |
def polyFitIgnoringOutliers(
x, y, deg=2, niter=3, nstd=2, return_outliers=False):
if return_outliers:
a = all_outliers = np.zeros_like(y, dtype=bool)
for i in range(niter):
poly = np.polyfit(x, y, deg)
p = np.poly1d(poly)
if i == niter - 1:
break
... | Returns:
(np.poly1d): callable function of polynomial fit excluding all outliers
Args:
deg (int): degree of polynomial fit
n_iter (int): do linear regression n times
successive removing
nstd (float): exclude outliers, if their deviation
is > [nstd] * standard deviation
return_outliers (bool): also return outlier positi... | juraj-google-style |
def registerAccount(self, person, vendorSpecific=None):
    """Register an account and return the parsed boolean result.

    See Also: registerAccountResponse().

    Args:
        person: the person/subject record to register.
        vendorSpecific: optional vendor-specific request parameters.

    Returns:
        The boolean parsed from the register-account response.
    """
    return self._read_boolean_response(
        self.registerAccountResponse(person, vendorSpecific)
    )
Args:
person:
vendorSpecific:
Returns: | juraj-google-style |
def get_or_create(session, model, **kwargs):
instance = session.query(model).filter_by(**kwargs).first()
if instance:
return instance, False
else:
instance = model(**kwargs)
if 'dataset' in kwargs:
instance.update_sequence_id(session, kwargs['dataset'])
sessi... | Get or create sqlalchemy instance.
Args:
session (Sqlalchemy session):
model (sqlalchemy model):
kwargs (dict): kwargs to lookup or create instance.
Returns:
Tuple: first element is found or created instance, second is boolean - True if instance created,
False if instance found. | juraj-google-style |
def absl_to_cpp(level):
    """Convert an absl log level to a cpp log level.

    Args:
        level: int, an absl.logging level.

    Raises:
        TypeError: Raised when level is not an integer.

    Returns:
        The corresponding integer level for use in Abseil C++.
    """
    if not isinstance(level, int):
        raise TypeError('Expect an int level, found {}'.format(type(level)))
    # Non-negative absl levels map to cpp level 0; negative (verbose)
    # absl levels map to their magnitude.
    return 0 if level >= 0 else -level
Args:
level: int, an absl.logging level.
Raises:
TypeError: Raised when level is not an integer.
Returns:
The corresponding integer level for use in Abseil C++. | juraj-google-style |
def report_server_init_errors(address=None, port=None, **kwargs):
try:
(yield)
except EnvironmentError as e:
if (e.errno == errno.EADDRINUSE):
log.critical('Cannot start Bokeh server, port %s is already in use', port)
elif (e.errno == errno.EADDRNOTAVAIL):
log.cri... | A context manager to help print more informative error messages when a
``Server`` cannot be started due to a network problem.
Args:
address (str) : network address that the server will be listening on
port (int) : network address that the server will be listening on
Example:
.. code-block:: python
with report_serv... | codesearchnet |
def get(self, url, params=None, **kwargs):
    """Call the API with a GET request.

    Args:
        url (str): Resource location relative to the base URL.
        params (dict or None): Query-string parameters.

    Returns:
        Whatever ``call_api`` returns (ResultParser or ErrorParser).
    """
    return self.call_api("GET", url, params=params, **kwargs)
Args:
url (str): Resource location relative to the base URL.
params (dict or None): Query-string parameters.
Returns:
ResultParser or ErrorParser. | juraj-google-style |
def optimize_boolean_expression_comparisons(ir_blocks):
operator_inverses = {u'=': u'!=', u'!=': u'='}
def visitor_fn(expression):
'Expression visitor function that performs the above rewriting.'
if (not isinstance(expression, BinaryComposition)):
return expression
left_is_b... | Optimize comparisons of a boolean binary comparison expression against a boolean literal.
Rewriting example:
BinaryComposition(
'=',
BinaryComposition('!=', something, NullLiteral)
False)
The above is rewritten into:
BinaryComposition('=', something, NullLiteral)
Args:
ir_blocks: list of basic block objects
Returns... | codesearchnet |
def cudnn_stacked_bi_gru(units, n_hidden, seq_lengths=None, n_stacks=2, keep_prob=1.0, concat_stacked_outputs=False, trainable_initial_states=False, name='cudnn_stacked_bi_gru', reuse=False):
if (seq_lengths is None):
seq_lengths = (tf.ones([tf.shape(units)[0]], dtype=tf.int32) * tf.shape(units)[1])
out... | Fast CuDNN Stacked Bi-GRU implementation
Args:
units: tf.Tensor with dimensions [B x T x F], where
B - batch size
T - number of tokens
F - features
n_hidden: dimensionality of hidden state
seq_lengths: number of tokens in each sample in the batch
n_stacks: number of stacked Bi-GRU
keep_prob: dropout keep_prob between ... | codesearchnet |
def easeInOutExpo(n):
    """An exponential tween that accelerates, reaches the midpoint, then decelerates.

    Args:
        n (float): The time progress, starting at 0.0 and ending at 1.0.

    Returns:
        (float) The line progress, starting at 0.0 and ending at 1.0.
        Suitable for passing to getPointOnLine().
    """
    _checkRange(n)
    # Exact endpoints are returned as-is to avoid floating-point drift.
    if n == 0:
        return 0
    if n == 1:
        return 1
    t = n * 2
    if t < 1:
        # First half: exponential acceleration.
        return 0.5 * 2 ** (10 * (t - 1))
    # Second half: mirrored exponential deceleration.
    t -= 1
    return 0.5 * (-1 * 2 ** (-10 * t) + 2)
Args:
n (float): The time progress, starting at 0.0 and ending at 1.0.
Returns:
(float) The line progress, starting at 0.0 and ending at 1.0. Suitable for passing to getPointOnLine(). | juraj-google-style |
def _prepare_4d_causal_attention_mask_with_cache_position(attention_mask: torch.Tensor, sequence_length: int, target_length: int, dtype: torch.dtype, cache_position: torch.Tensor, batch_size: int, **kwargs):
if attention_mask is not None and attention_mask.dim() == 4:
causal_mask = attention_mask
else:
... | Creates a causal 4D mask of shape `(batch_size, 1, query_length, key_value_length)` from a 2D mask of shape
`(batch_size, key_value_length)`, or if the input `attention_mask` is already 4D, do nothing.
Args:
attention_mask (`torch.Tensor`):
A 2D attention mask of shape `(batch_size, key_value_length)` or a 4D attentio... | github-repos |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.