code stringlengths 20 4.93k | docstring stringlengths 33 1.27k | source stringclasses 3
values |
|---|---|---|
def build_images(prefix, images, tag=None, commit_range=None, push=False, chart_version=None):
value_modifications = {}
for (name, options) in images.items():
image_path = options.get('contextPath', os.path.join('images', name))
image_tag = tag
paths = (list(options.get('paths', [])) + [... | Build a collection of docker images
Args:
prefix (str): the prefix to add to images
images (dict): dict of image-specs from chartpress.yml
tag (str):
Specific tag to use instead of the last modified commit.
If unspecified the tag for each image will be the hash of the last commit
to modify the image's files.
commit_ra... | codesearchnet |
def create_token_type_ids_from_sequences(self, token_ids_0: List[int], token_ids_1: Optional[List[int]]=None) -> List[int]:
    """Create the token type IDs corresponding to the sequences passed.

    [What are token type IDs?](../glossary#token-type-ids)

    Should be overridden in a subclass if the model has a special way of
    building those. The single-sequence mask covers the sequence plus two
    special tokens with type 0; for a pair, the first segment (sequence
    plus one special token) is type 0 and the second segment (sequence
    plus three special tokens) is type 1.

    Args:
        token_ids_0 (`List[int]`):
            The first tokenized sequence.
        token_ids_1 (`List[int]`, *optional*):
            The second tokenized sequence.

    Returns:
        `List[int]`: The token type IDs.
    """
    first_len = len(token_ids_0)
    if token_ids_1 is None:
        # Single sequence: everything (including the two added special
        # tokens) belongs to segment 0.
        return [0] * (first_len + 2)
    second_len = len(token_ids_1)
    return [0] * (first_len + 1) + [1] * (second_len + 3)
IDs?](../glossary#token-type-ids) Should be overridden in a subclass if the model has a special way of
building: those.
Args:
token_ids_0 (`List[int]`):
The first tokenized sequence.
token_ids_1 (`List[int]`, *optional*):
The second ... | github-repos |
def from_orbit(cls, orbit, name=None, norad_id=None, cospar_id=None):
name = "0 %s\n" % name if name is not None else ""
norad_id = norad_id if norad_id is not None else "99999"
if cospar_id is not None:
y, _, i = cospar_id.partition('-')
cospar_id = y[2:] + i
... | Convert an orbit to it's TLE representation
Args:
orbit (Orbit)
norad_id (str or int):
cospar_id (str):
Return:
str: TLE representation | juraj-google-style |
def sequential_experts_gemm(token_states, expert_weights, tokens_per_expert):
num_tokens = token_states.shape[0]
out_features = expert_weights.shape[-1]
output = torch.zeros(num_tokens, out_features, dtype=token_states.dtype, device=token_states.device)
cumsum_num_tokens = torch.cumsum(tokens_per_expert... | Compute the matrix multiplication (GEMM) for each expert sequentially. This approach is computationally inefficient, especially when dealing with a large number of experts.
Args:
token_states (torch.Tensor): Input tensor of shape (num_tokens, in_features).
expert_weights (torch.Tensor): Weight tensor of shape (num_exp... | github-repos |
def get_asset(self, asset_hash, id=None, endpoint=None):
    """Get an asset by its hash.

    Args:
        asset_hash: (str) asset to look up, e.g.
            'c56f33fc6ecfcd0c225c4ab356fee59390af8560be0e930faebe74a6daff7c9b'
        id: (int, optional) id to use for response tracking
        endpoint: (RPCEndpoint, optional) endpoint to use for the call

    Returns:
        json object of the result or the error encountered
    """
    params = [asset_hash]
    return self._call_endpoint(GET_ASSET_STATE, params=params, id=id, endpoint=endpoint)
Args:
asset_hash: (str) asset to lookup, example would be 'c56f33fc6ecfcd0c225c4ab356fee59390af8560be0e930faebe74a6daff7c9b'
id: (int, optional) id to use for response tracking
endpoint: (RPCEndpoint, optional) endpoint to specify to use
Returns:
json object of the result or the error encounte... | juraj-google-style |
def en010(self, value=None):
    """Set the value for IDD Field `en010`.

    Enthalpy corresponding to 1.0% annual cumulative frequency of
    occurrence. A value of None marks the field as missing and is stored
    without validation.

    Args:
        value (float): value for IDD Field `en010`. Unit: kJ/kg.
            If `value` is None it will not be checked against the
            specification and is assumed to be a missing value.

    Raises:
        ValueError: if `value` cannot be converted to a float.
    """
    if value is None:
        # Missing value: store as-is, no validation.
        self._en010 = None
        return
    try:
        coerced = float(value)
    except ValueError:
        raise ValueError('value {} need to be of type float '
                         'for field `en010`'.format(value))
    self._en010 = coerced
mean coincident dry-bulb temperature to
Enthalpy corresponding to 1.0% annual cumulative frequency of occurrence
Args:
value (float): value for IDD Field `en010`
Unit: kJ/kg
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises... | juraj-google-style |
def set_management_icmp(enabled=True, deploy=False):
if (enabled is True):
value = 'no'
elif (enabled is False):
value = 'yes'
else:
raise CommandExecutionError('Invalid option provided for service enabled option.')
ret = {}
query = {'type': 'config', 'action': 'set', 'xpath'... | Enables or disables the ICMP management service on the device.
CLI Example:
Args:
enabled (bool): If true the service will be enabled. If false the service will be disabled.
deploy (bool): If true then commit the full candidate configuration, if false only set pending change.
.. code-block:: bash
salt '*' panos.se... | codesearchnet |
def from_string(cls, key, password='notasecret'):
key = _helpers._from_bytes(key)
marker_id, key_bytes = pem.readPemBlocksFromFile(
six.StringIO(key), _PKCS1_MARKER, _PKCS8_MARKER)
if marker_id == 0:
pkey = rsa.key.PrivateKey.load_pkcs1(key_bytes,
... | Construct an RsaSigner instance from a string.
Args:
key: string, private key in PEM format.
password: string, password for private key file. Unused for PEM
files.
Returns:
RsaSigner instance.
Raises:
ValueError if the key cannot be parsed as PKCS#1 or PKCS#8 in
PEM format. | juraj-google-style |
def _get_scripts(self, host_metadata):
deploy_scripts = host_metadata.get('deploy-scripts', [])
if deploy_scripts:
return deploy_scripts
ovirt_scripts = host_metadata.get('ovirt-scripts', [])
if ovirt_scripts:
warnings.warn('Deprecated entry "ovirt-scripts" will not be supported in the f... | Temporary method to retrieve the host scripts
TODO:
remove once the "ovirt-scripts" option gets deprecated
Args:
host_metadata(dict): host metadata to retrieve the scripts for
Returns:
list: deploy scripts for the host, empty if none found | codesearchnet |
def _ModifyInterface(self, interface_config, config_key, config_value, replace=False):
config_entry = ('%s=%s' % (config_key, config_value))
if (not open(interface_config).read().count(config_key)):
with open(interface_config, 'a') as config:
config.write(('%s\n' % config_entry))
elif re... | Write a value to a config file if not already present.
Args:
interface_config: string, the path to a config file.
config_key: string, the configuration key to set.
config_value: string, the value to set for the configuration key.
replace: bool, replace the configuration option if already present. | codesearchnet |
def readHolidayDates(self):
self.setContext('readHolidayDates')
try:
req_str = '0152310230304230282903'
self.request(False)
req_crc = self.calc_crc16(req_str[2:].decode('hex'))
req_str += req_crc
self.m_serial_port.write(req_str.decode('hex'))
raw_ret = self.m_ser... | Serial call to read holiday dates into meter object buffer.
Returns:
bool: True on completion. | codesearchnet |
def _stratonovich_integral(dim, dt, sqrt_dt, dw, stratonovich_draws, order):
p = order - 1
sqrt_rho_p = tf.sqrt(tf.constant(1 / 12 - sum([1 / r ** 2 for r in range(1, order + 1)]) / 2 / _PI ** 2, dtype=dw.dtype))
mu = stratonovich_draws[0]
zeta = tf.transpose(stratonovich_draws[1], [2, 0, 1])
eta = ... | Approximate Stratonovich integrals J(i, j).
Args:
dim: An integer. The dimension of the state.
dt: A double. The time step.
sqrt_dt: A double. The square root of dt.
dw: A double. The Wiener increment.
stratonovich_draws: A list of tensors corresponding to the independent
N(0,1) random variables used in the approxim... | github-repos |
def get_filters(component):
def inner(c, filters=None):
filters = (filters or set())
if (not ENABLED):
return filters
if (not plugins.is_datasource(c)):
return filters
if (c in FILTERS):
filters |= FILTERS[c]
for d in dr.get_dependents(c):... | Get the set of filters for the given datasource.
Filters added to a ``RegistryPoint`` will be applied to all datasources that
implement it. Filters added to a datasource implementation apply only to
that implementation.
For example, a filter added to ``Specs.ps_auxww`` will apply to
``DefaultSpecs.ps_auxww``, ``Insig... | codesearchnet |
def fill_tree(self, tree, input_dict):
def add_element(item, key, value):
child_name = QtGui.QStandardItem(key)
child_name.setDragEnabled(False)
child_name.setSelectable(False)
child_name.setEditable(False)
if isinstance(value, dict):
... | fills a tree with nested parameters
Args:
tree: QtGui.QTreeView
parameters: dictionary or Parameter object
Returns: | juraj-google-style |
def _requests_post(self, url, json=None, data=None, username='', password='', xapikey='', headers=None, timeout=30):
if (headers is None):
headers = {}
auth = None
if (username and password):
auth = requests.auth.HTTPBasicAuth(username, password)
elif xapikey:
headers['x-api-key'... | This function will POST to the url endpoint using requests.
Returning an AdyenResult object on 200 HTTP response.
Either json or data has to be provided.
If username and password are provided, basic auth will be used.
Args:
url (str): url to send the POST
json (dict, optional): Dict of the JSON to POST
data (dict, op... | codesearchnet |
def outgoing_args(self, nodeid):
_vars = self._vars
_hcons = self._hcons
args = self.args(nodeid)
for arg, val in list(args.items()):
if arg == IVARG_ROLE or val not in _vars:
del args[arg]
else:
refs = _vars... | Return the arguments going from *nodeid* to other predications.
Valid arguments include regular variable arguments and scopal
(label-selecting or HCONS) arguments. MOD/EQ
links, intrinsic arguments, and constant arguments are not
included.
Args:
nodeid: the nodeid of the EP that is the arguments' source
Returns:
dict... | juraj-google-style |
def TransformerEncoder(vocab_size, num_classes=10, feature_depth=512, feedforward_depth=2048, num_layers=6, num_heads=8, dropout=0.1, max_len=2048, mode='train'):
input_embedding = layers.Serial(layers.Embedding(feature_depth, vocab_size), layers.Dropout(rate=dropout, mode=mode), layers.PositionalEncoding(max_len=m... | Transformer encoder.
Args:
vocab_size: int: vocab size
num_classes: how many classes on output
feature_depth: int: depth of embedding
feedforward_depth: int: depth of feed-forward layer
num_layers: int: number of encoder/decoder layers
num_heads: int: number of attention heads
dropout: float: dropout rate (how much t... | codesearchnet |
def dms_maker(self, force_rerun=False):
log.debug('{}: running surface representation maker...'.format(self.id))
if not self.receptorpdb_path:
return ValueError('Please run protein_only_and_noH')
dms = op.join(self.dock_dir, '{}_receptor.dms'.format(self.id))
if s... | Create surface representation (dms file) of receptor
Args:
force_rerun (bool): If method should be rerun even if output file exists | juraj-google-style |
def stage_tc_create_security_label(self, label, resource):
sl_resource = resource.security_labels(label)
sl_resource.http_method = 'POST'
sl_response = sl_resource.request()
if sl_response.get('status') != 'Success':
self.log.warning(
'[tcex] Failed a... | Add a security label to a resource.
Args:
label (str): The security label (must exit in ThreatConnect).
resource (obj): An instance of tcex resource class. | juraj-google-style |
def __init__(self, n=3, cap_front=True, cap_end=True):
if n < 2:
raise ValueError('n must be 1 or more')
super(Alkane, self).__init__()
if not cap_front:
n += 1
if not cap_end:
n += 1
chain = mb.recipes.Polymer(CH2(), n=n-2, ... | Initialize an Alkane Compound.
Args:
n: Number of carbon atoms.
cap_front: Add methyl group to beginning of chain ('down' port).
cap_end: Add methyl group to end of chain ('up' port). | juraj-google-style |
def ping(self, timeout=12):
    """Send a keep-alive request for the endpoint.

    Args:
        timeout (int): maximum amount of time for the endpoint to stay active
    """
    active_url = "{0}/users/ME/endpoints/{1}/active".format(self.conn.msgsHost, self.id)
    self.conn("POST", active_url,
              auth=SkypeConnection.Auth.RegToken, json={"timeout": timeout})
Args:
timeout (int): maximum amount of time for the endpoint to stay active | juraj-google-style |
def config():
    """Return the current git configuration.

    Returns:
        dict[str, Any]: The current git config taken from ``git config --list``.
    """
    raw_output = shell.run(
        'git config --list', capture=True, never_pretend=True
    ).stdout.strip()
    # Each line has the form ``section.key=value``; split on the first '='
    # only, since values may themselves contain '='.
    return {
        key.strip(): val.strip()
        for key, val in (line.split('=', 1) for line in raw_output.splitlines())
    }
Returns:
dict[str, Any]: The current git config taken from ``git config --list``. | codesearchnet |
def l2_regression_loss(y, target, name=None):
    """Calculate the square root of the SSE between ``y`` and ``target``.

    Args:
        y: the calculated values.
        target: the desired values.
        name: the name for this op, defaults to l2_regression.

    Returns:
        A tensorflow op.
    """
    with tf.name_scope(name, 'l2_regression', [y, target]) as scope:
        y_tensor = tf.convert_to_tensor(y, name='y')
        target_tensor = tf.convert_to_tensor(target, name='target')
        # Delegate the squared loss to the sibling helper, then take the root.
        return tf.sqrt(l2_regression_sq_loss(y_tensor, target_tensor, name=scope))
Args:
y: the calculated values.
target: the desired values.
name: the name for this op, defaults to l2_regression
Returns:
A tensorflow op. | juraj-google-style |
def dict_from_file(filename, key_type=str):
mapping = {}
with open(filename, 'r') as f:
for line in f:
items = line.rstrip('\n').split()
assert (len(items) >= 2)
key = key_type(items[0])
val = (items[1:] if (len(items) > 2) else items[1])
mappi... | Load a text file and parse the content as a dict.
Each line of the text file will be two or more columns splited by
whitespaces or tabs. The first column will be parsed as dict keys, and
the following columns will be parsed as dict values.
Args:
filename(str): Filename.
key_type(type): Type of the dict's keys. str is... | codesearchnet |
def fn_with_custom_grad(grad_fn, use_global_vars=False):
    """Decorator to create a subgraph with a custom gradient function.

    The subgraph created by the decorated function is NOT put in a Defun and
    so does not suffer from the limitations of the Defun (all subgraph ops on
    the same device, no summaries).

    Args:
        grad_fn: function computing the custom gradient, receiving the
            decorated function's inputs, variables and outputs.
        use_global_vars: bool, forwarded to the underlying implementation.

    Returns:
        A decorator that wraps a function so calls go through
        ``_fn_with_custom_grad``.
    """
    def decorator(fn):
        @functools.wraps(fn)
        def inner(*args):
            return _fn_with_custom_grad(
                fn, args, grad_fn, use_global_vars=use_global_vars)
        return inner
    return decorator
The subgraph created by the decorated function is NOT put in a Defun and so
does not suffer from the limitations of the Defun (all subgraph ops on the
same device, no summaries).
Args:
grad_fn: function with signature
(inputs, variables, outputs, output_... | codesearchnet |
def set_tensor_final(self, tensor_name):
    """Denote a tensor as a final output of the computation.

    Args:
        tensor_name: a string, name of a tensor in the graph.
    """
    # Resolve the name to its tensor object before registering it.
    self._final_tensors.add(self._name_to_tensor(tensor_name))
Args:
tensor_name: a string, name of a tensor in the graph. | codesearchnet |
def resolve_variables(self, provided_variables):
self.resolved_variables = {}
defined_variables = self.defined_variables()
variable_dict = dict((var.name, var) for var in provided_variables)
for var_name, var_def in defined_variables.items():
value = resolve_variable... | Resolve the values of the blueprint variables.
This will resolve the values of the `VARIABLES` with values from the
env file, the config, and any lookups resolved.
Args:
provided_variables (list of :class:`stacker.variables.Variable`):
list of provided variables | juraj-google-style |
def with_inverse(points, noise):
n_points = len(points)/2
break_point = n_points
points_part = copy.deepcopy(points)
points_part = list(reversed(points_part))
part = kalman_filter(points_part, noise)
total = kalman_filter(points, noise)
result = list(reversed(part))[:break_point]... | Smooths a set of points
It smooths them twice, once in given order, another one in the reverse order.
The the first half of the results will be taken from the reverse order and
the second half from the normal order.
Args:
points (:obj:`list` of :obj:`Point`)
noise (float): Expected noise, the higher it is the more th... | juraj-google-style |
def _SwitchRefOrTensor(data, pred, name='Switch'):
data = ops.convert_to_tensor_or_composite(data, name='data')
with ops.colocate_with(data, ignore_existing=True):
if isinstance(data, tensor_lib.Tensor):
if data.dtype._is_ref_dtype:
return ref_switch(data, pred, name=name)
... | Forwards `data` to an output determined by `pred`.
If `pred` is false, the `data` input is forwarded to the first output.
Otherwise, the data goes to the second output.
This op handles `Tensor`s and `IndexedSlices`.
Args:
data: The tensor to be forwarded to the appropriate output.
pred: A scalar that specifies which... | github-repos |
def parse_individual(sample):
ind_info = {}
if 'sample_id' not in sample:
raise PedigreeError("One sample is missing 'sample_id'")
sample_id = sample['sample_id']
if 'sex' not in sample:
raise PedigreeError("Sample %s is missing 'sex'" % sample_id)
sex = sample['sex']
i... | Parse individual information
Args:
sample (dict)
Returns:
{
'individual_id': str,
'father': str,
'mother': str,
'display_name': str,
'sex': str,
'phenotype': str,
'bam_file': str,
'vcf2cytosure': str,
'analysis_type': str,
'capture_kits': list(str),
} | juraj-google-style |
def emit_counter(self, category: str, name: str, pid: int, timestamp: int, counter: str, value: int) -> None:
    """Emit a record for a single counter.

    Args:
        category: The event category as a string.
        name: The event name as a string.
        pid: Identifier of the process generating this event as an integer.
        timestamp: The timestamp of this event as a long integer.
        counter: Name of the counter as a string.
        value: Value of the counter as an integer.
    """
    counter_args = {counter: value}
    # Counter events use phase 'C'; the thread id field is always 0 here.
    counter_event = self._create_event('C', category, name, pid, 0, timestamp)
    counter_event['args'] = counter_args
    self._events.append(counter_event)
Args:
category: The event category as a string.
name: The event name as a string.
pid: Identifier of the process generating this event as an integer.
timestamp: The timestamp of this event as a long integer.
counter: Name of the counter as a string.
value: Value of the counter ... | github-repos |
def _extract_dir(self, dir_not_exists, output):
if not dir_not_exists:
lst = output.dir_cache
return {i["relpath"]: i["md5"] for i in lst}
return {} | Extract the content of dvc tree file
Args:
self(object) - Repo class instance
dir_not_exists(bool) - flag for directory existence
output(object) - OutputLOCAL class instance
Returns:
dict - dictionary with keys - paths to file in .dvc/cache
values -checksums for that files | juraj-google-style |
def write_object_proto_for_resource_variable(resource_variable, proto, options, enforce_naming=True):
proto.variable.SetInParent()
if enforce_naming and (not resource_variable.name.endswith(':0')):
raise ValueError(f"Cowardly refusing to save variable {resource_variable.name} because of unexpected suffi... | Writes additional information of the variable into the SavedObject proto.
This allows users to define a `hook` to provide extra information of the
variable to the SavedObject.
For example, DistributedVariable class would fill in components in the
distributed context.
Args:
resource_variable: A `ResourceVariable` or ... | github-repos |
def check_config_options(_class, required_options, optional_options, options):
for opt in required_options:
if (opt not in options):
msg = 'Required option missing: {0}'
raise ConfigurationError(msg.format(opt))
for opt in options:
if (opt not in (required_options + optio... | Helper method to check options.
Arguments:
_class -- the original class that takes received the options.
required_options -- the options that are required. If they are not
present, a ConfigurationError is raised. Given as a
tuple.
optional_options -- the options that are optional. Given options that are
not ... | codesearchnet |
def add_logged_in_session(self, response=None):
if (not response):
response = self.get('go/api/pipelines.xml')
self._set_session_cookie(response)
if (not self._session_id):
raise AuthenticationFailed('No session id extracted from request.')
response = self.get('go/pipelines')
match =... | Make the request appear to be coming from a browser
This is to interact with older parts of Go that doesn't have a
proper API call to be made. What will be done:
1. If no response passed in a call to `go/api/pipelines.xml` is
made to get a valid session
2. `JSESSIONID` will be populated from this request
3. A request... | codesearchnet |
def show_fields(self, block=None):
mapping = self._mapping()
if block is None:
return mapping
elif block == "top":
blocks = set()
for key in mapping.keys():
blocks.add(key.split(".")[0])
block_map = {}
for b in ... | Retrieve and return the mapping for the given metadata block.
Arguments:
block (str): The top-level field to fetch the mapping for (for example, ``"mdf"``),
or the special values ``None`` for everything or ``"top"`` for just the
top-level fields.
**Default:** ``None``.
index (str): The Search index to map. **Default:*... | juraj-google-style |
def restart(self, container, timeout=10):
params = {'t': timeout}
url = self._url('/containers/{0}/restart', container)
conn_timeout = self.timeout
if (conn_timeout is not None):
conn_timeout += timeout
res = self._post(url, params=params, timeout=conn_timeout)
self._raise_for_status(res... | Restart a container. Similar to the ``docker restart`` command.
Args:
container (str or dict): The container to restart. If a dict, the
``Id`` key is used.
timeout (int): Number of seconds to try to stop for before killing
the container. Once killed it will then be restarted. Default
is 10 seconds.
Raises:
:py:class:... | codesearchnet |
def check_prerequisites(prerequisites, checker, msg_tmpl='Prerequisites "{}" are required in method "{}" but not found, please install them first.'):
def wrap(func):
@functools.wraps(func)
def wrapped_func(*args, **kwargs):
requirements = ([prerequisites] if isinstance(prerequisites, s... | A decorator factory to check if prerequisites are satisfied.
Args:
prerequisites (str of list[str]): Prerequisites to be checked.
checker (callable): The checker method that returns True if a
prerequisite is meet, False otherwise.
msg_tmpl (str): The message template with two variables.
Returns:
decorator: A specific... | codesearchnet |
def lasio_get(l, section, item, attrib='value', default=None, remap=None, funcs=None):
remap = (remap or {})
item_to_fetch = remap.get(item, item)
if (item_to_fetch is None):
return None
try:
obj = getattr(l, section)
result = getattr(obj, item_to_fetch)[attrib]
except:
... | Grabs, renames and transforms stuff from a lasio object.
Args:
l (lasio): a lasio instance.
section (str): The LAS section to grab from, eg ``well``
item (str): The item in the LAS section to grab from, eg ``name``
attrib (str): The attribute of the item to grab, eg ``value``
default (str): What to return instead.
rem... | codesearchnet |
def get_ast_dict(belstr, component_type: str = ""):
errors = []
parsed = {}
bels = list(belstr)
char_locs, errors = parse_chars(bels, errors)
parsed, errors = parse_functions(belstr, char_locs, parsed, errors)
parsed, errors = parse_args(bels, char_locs, parsed, errors)
parsed, errors ... | Convert BEL string to AST dictionary
Args:
belstr: BEL string
component_type: Empty string or 'subject' or 'object' to indicate that we
are parsing the subject or object field input | juraj-google-style |
def send(self, **req_kwargs):
i = 0
while True:
response = self._send(**req_kwargs).json()
if ('error' not in response):
break
error = response['error']
if (error['code'] != 401):
raise exception.APIException(error['code'], error)
if (i >= self.RET... | Send an authenticated request to a Google API.
Automatically retries if the access token has expired.
Args:
**req_kwargs: Arbitrary keyword arguments to pass to Requests.
Return:
dict: The parsed JSON response.
Raises:
APIException: If the server returns an error.
LoginException: If :py:meth:`login` has not been cal... | codesearchnet |
def img_to_array(img, data_format=None, dtype=None):
data_format = backend.standardize_data_format(data_format)
if dtype is None:
dtype = backend.floatx()
x = np.asarray(img, dtype=dtype)
if len(x.shape) == 3:
if data_format == 'channels_first':
x = x.transpose(2, 0, 1)
e... | Converts a PIL Image instance to a NumPy array.
Example:
```python
from PIL import Image
img_data = np.random.random(size=(100, 100, 3))
img = keras.utils.array_to_img(img_data)
array = keras.utils.image.img_to_array(img)
```
Args:
img: Input PIL Image instance.
data_format: Image data format, can be either `"channe... | github-repos |
def _RemoveAuthorizedKeys(self, user):
pw_entry = self._GetUser(user)
if (not pw_entry):
return
home_dir = pw_entry.pw_dir
authorized_keys_file = os.path.join(home_dir, '.ssh', 'authorized_keys')
if os.path.exists(authorized_keys_file):
try:
os.remove(authorized_keys_file... | Remove a Linux user account's authorized keys file to prevent login.
Args:
user: string, the Linux user account to remove access. | codesearchnet |
def get_platform():
global PLATFORM
cmd = 'uname'
out, err = run_shell_cmd(cmd)
platform_detected = out.strip().lower()
if platform_detected != 'linux':
if err and FLAGS.debug:
print('Error in detecting platform:\n %s' % str(err))
print('Error: Detected unsupported operat... | Retrieves platform information.
Currently the script only support linux. If other platoforms such as Windows
or MacOS is detected, it throws an error and terminates.
Returns:
String that is platform type.
e.g. 'linux' | github-repos |
def __init__(self, lexer=None, **kwargs):
if lexer is not None:
if isinstance(lexer, JbossLexer):
self.lexer = lexer.lexer
else:
self.lexer = lexer
else:
self.lexer = JbossLexer().lexer
kwargs.setdefau... | Constructs the JsonParser based on the grammar contained herein.
Successful construction builds the ply.yacc instance and sets
self.parser.
Args:
lexer: A ply.lex or JsonLexer instance that will produce JSON_TOKENS. | juraj-google-style |
def parents(self, as_resources=False):
    """Return the hierarchical parents of this resource.

    Args:
        as_resources (bool): if True, open each parent as the appropriate
            resource type instead of returning its URI only.

    Returns:
        list: parent URIs, or resources when ``as_resources`` is True.
    """
    has_parent = self.rdf.prefixes.fedora.hasParent
    parent_uris = [
        obj for _, _, obj in self.rdf.graph.triples((None, has_parent, None))
    ]
    if not as_resources:
        return parent_uris
    logger.debug('retrieving parent as resource')
    return [self.repo.get_resource(uri) for uri in parent_uris]
Args:
as_resources (bool): if True, opens each as appropriate resource type instead of return URI only
Returns:
(list): list of resources | juraj-google-style |
def find_connected_atoms(struct, tolerance=0.45, ldict=JmolNN().el_radius):
n_atoms = len(struct.species)
fc = np.array(struct.frac_coords)
species = list(map(str, struct.species))
for (i, item) in enumerate(species):
if (not (item in ldict.keys())):
species[i] = str(Specie.from_stri... | Finds the list of bonded atoms.
Args:
struct (Structure): Input structure
tolerance: length in angstroms used in finding bonded atoms. Two atoms are considered bonded if (radius of atom 1) + (radius of atom 2) + (tolerance) < (distance between atoms 1 and 2). Default value = 0.45, the value used by JMol and Cheon et a... | codesearchnet |
def update_config(self, config, timeout=(- 1)):
    """Update the remote server configuration and the automatic backup schedule.

    Args:
        config (dict): Object to update.
        timeout:
            Timeout in seconds. Wait for task completion by default. The
            timeout does not abort the operation in OneView, it just stops
            waiting for its completion.

    Returns:
        dict: Backup details.
    """
    config_uri = self.URI + '/config'
    return self._client.update(config, uri=config_uri, timeout=timeout)
Args:
config (dict): Object to update.
timeout:
Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation
in OneView, just stop waiting for its completion.
Returns:
dict: Backup details. | codesearchnet |
def get_special_tokens_mask(self, token_ids_0: List[int], token_ids_1: Optional[List[int]]=None, already_has_special_tokens: bool=False) -> List[int]:
if already_has_special_tokens:
return super().get_special_tokens_mask(token_ids_0=token_ids_0, token_ids_1=token_ids_1, already_has_special_tokens=True)
... | Retrieve sequence ids from a token list that has no special tokens added. This method is called when adding
special tokens using the tokenizer `prepare_for_model` method.
Args:
token_ids_0 (`List[int]`):
List of IDs.
token_ids_1 (`List[int]`, *optional*):
Optional second list of IDs for sequence pairs.
already_has_spe... | github-repos |
def fetch(self, transfer_id, data=None, **kwargs):
    """Fetch the Transfer for a given id.

    Args:
        transfer_id: Id for which the transfer object has to be retrieved.
        data (dict, optional): extra query parameters for the request.

    Returns:
        Transfer dict for the given transfer id.
    """
    # NOTE: the previous signature used ``data={}``, a mutable default
    # shared across all calls; create a fresh dict per call instead.
    if data is None:
        data = {}
    return super(Transfer, self).fetch(transfer_id, data, **kwargs)
Args:
transfer_id : Id for which transfer object has to be retrieved
Returns:
Transfer dict for given transfer Id | juraj-google-style |
async def verify_task_types(chain):
valid_task_types = get_valid_task_types()
task_count = {}
for obj in chain.get_all_links_in_chain():
task_type = obj.task_type
log.info("Verifying {} {} as a {} task...".format(obj.name, obj.task_id, task_type))
task_count.setdefault(task_type... | Verify the task type (e.g. decision, build) of each link in the chain.
Args:
chain (ChainOfTrust): the chain we're operating on
Returns:
dict: mapping task type to the number of links. | juraj-google-style |
def __init__(self, name: str, ctx: 'context.Context'):
super().__init__(name, ctx)
self._cls = None
self.members = datatypes.MonitorDict()
self._instance_type_parameters: 'datatypes.AliasingMonitorDict[str, cfg.Variable]' = datatypes.AliasingMonitorDict()
self._maybe_missing_members: bool | None = N... | Initialize a SimpleValue.
Args:
name: Name of this value. For debugging and error reporting.
ctx: The abstract context. | github-repos |
def looks_like_url(url):
    """Simplified check to see if the text appears to be a URL.

    Similar to `urlparse` but much more basic.

    Returns:
        True if the url str appears to be valid.
        False otherwise.

    >>> url = looks_like_url("totalgood.org")
    >>> bool(url)
    True
    """
    # The isinstance test was previously duplicated in two consecutive
    # conditions; a single guard is equivalent and clearer.
    if not isinstance(url, basestring):
        return False
    # Reject unreasonably long strings before running the regex.
    if len(url) >= 1024 or not cre_url.match(url):
        return False
    return True
return True | Simplified check to see if the text appears to be a URL.
Similar to `urlparse` but much more basic.
Returns:
True if the url str appears to be valid.
False otherwise.
>>> url = looks_like_url("totalgood.org")
>>> bool(url)
True | codesearchnet |
def _check_module_is_image_embedding(module_spec):
issues = []
input_info_dict = module_spec.get_input_info_dict()
if (list(input_info_dict.keys()) != ["images"] or
input_info_dict["images"].dtype != tf.float32):
issues.append("Module 'default' signature must require a single input, "
... | Raises ValueError if `module_spec` is not usable as image embedding.
Args:
module_spec: A `_ModuleSpec` to test.
Raises:
ValueError: if `module_spec` default signature is not compatible with
mappingan "images" input to a Tensor(float32, shape=(_,K)). | juraj-google-style |
def _CheckIsFile(self, file_entry):
    """Check the is_file find specification.

    Args:
        file_entry (FileEntry): file entry.

    Returns:
        bool: True if the file entry matches the find specification, False if not.
    """
    # Only match when file entries are among the requested types AND the
    # entry itself is a file.
    return (
        definitions.FILE_ENTRY_TYPE_FILE in self._file_entry_types
        and file_entry.IsFile())
Args:
file_entry (FileEntry): file entry.
Returns:
bool: True if the file entry matches the find specification, False if not. | juraj-google-style |
def grep(regex, output):
    """Return the lines of an output stream that match a regex pattern.

    Similar to linux's `grep`, but does not rely on the `grep` binary and is
    not sensitive to line endings, so it can be used cross-platform.

    Args:
        regex: string, a regex that matches the expected pattern.
        output: byte string, the raw output to search through.

    Returns:
        list of strings: the matching lines, each stripped of surrounding
        whitespace.
    """
    decoded_lines = output.decode('utf-8').strip().splitlines()
    return [line.strip() for line in decoded_lines if re.search(regex, line)]
that matches a given regex pattern.
It does not rely on the `grep` binary and is not sensitive to line endings,
so it can be used cross-platform.
Args:
regex: string, a regex that matches the expected pattern.
output: byte string, the raw output of ... | codesearchnet |
def record_corrected_value(self, value, expected_interval, count=1):
    """Record a new value into the histogram, correcting for coordinated
    omission if needed.

    When ``expected_interval`` is positive and the recorded value exceeds
    it, additional synthetic samples are recorded at successively smaller
    values to compensate for the samples coordinated omission would hide.

    Args:
        value: the value to record (must be in the valid range).
        expected_interval: the expected interval between 2 value samples.
        count: incremental count (defaults to 1).

    Returns:
        bool: False as soon as an underlying record fails, True otherwise.
    """
    remaining = value
    while self.record_value(remaining, count):
        if remaining <= expected_interval or expected_interval <= 0:
            return True
        remaining -= expected_interval
    return False
value -= expected_interval | Record a new value into the histogram and correct for
coordinated omission if needed
Args:
value: the value to record (must be in the valid range)
expected_interval: the expected interval between 2 value samples
count: incremental count (defaults to 1) | juraj-google-style |
def parse_response(service, response, search_type):
_LOG.debug('Parse response "%s" from service "%s" of type "%s"', response, service, search_type)
items = []
if ('searchResult' in response):
response = response['searchResult']
elif ('getMetadataResult' in response):
response = response... | Parse the response to a music service query and return a SearchResult
Args:
service (MusicService): The music service that produced the response
response (OrderedDict): The response from the soap client call
search_type (str): A string that indicates the search type that the
response is from
Returns:
SearchResult: A ... | codesearchnet |
def _process(compressor, input_filename, output_filename):
    """Helper function to compress an image.

    Args:
        compressor: callable that writes a compressed copy of
            ``input_filename`` to ``output_filename``.
        input_filename: path of the image to compress.
        output_filename: path the compressed image is written to.

    Returns:
        _CompressorResult named tuple, with the resulting size, the name of
        the output file and the name of the compressor.
    """
    compressor(input_filename, output_filename)
    compressed_size = os.path.getsize(output_filename)
    return _CompressorResult(compressed_size, output_filename, compressor.__name__)
Returns:
_CompressorResult named tuple, with the resulting size, the name of the
output file and the name of the compressor. | codesearchnet |
def percent_point(self, U):
    """Given a cumulated distribution value, return a value in original space.

    Args:
        U: `np.ndarray` of shape (n, 1) and values in [0, 1].

    Returns:
        `np.ndarray`: Estimated values in original space.
    """
    self.check_fit()
    # Invert the CDF of the fitted normal distribution.
    location, scale = self.mean, self.std
    return norm.ppf(U, loc=location, scale=scale)
Arguments:
U: `np.ndarray` of shape (n, 1) and values in [0,1]
Returns:
`np.ndarray`: Estimated values in original space. | juraj-google-style |
def open_writer(self, init_result, uid):
    """Opens a writer for writing a bundle of elements to the sink.

    Subclasses must override this method; the base implementation always
    raises.

    Args:
        init_result: the result of initialize_write() invocation.
        uid: a unique identifier generated by the system.

    Returns:
        an ``iobase.Writer`` that can be used to write a bundle of records to
        the current sink.

    Raises:
        NotImplementedError: always, in this base class.
    """
    raise NotImplementedError
Args:
init_result: the result of initialize_write() invocation.
uid: a unique identifier generated by the system.
Returns:
an ``iobase.Writer`` that can be used to write a bundle of records to the
current sink. | github-repos |
def load_model_from_hdf5(filepath, custom_objects=None, compile=True):
if h5py is None:
raise ImportError('`load_model()` using h5 format requires h5py. Could not import h5py.')
if not custom_objects:
custom_objects = {}
gco = object_registration.GLOBAL_CUSTOM_OBJECTS
tlco = global_state... | Loads a model saved via `save_model_to_hdf5`.
Args:
filepath: One of the following:
- String, path to the saved model
- `h5py.File` object from which to load the model
custom_objects: Optional dictionary mapping names
(strings) to custom classes or functions to be
considered during deserialization.
compile: Boolean, w... | github-repos |
def AddCalledComponent(self, component, target, args, filename, lineno, capacity, action=CALLED_CALLABLE):
    """Adds an element to the trace indicating that a component was called.

    Also applies to instantiating a class.

    Args:
      component: The result of calling the callable.
      target: The name of the callable.
      args: The args consumed in order to call this callable.
      filename: The file in which the callable is defined, or None if N/A.
      lineno: The line number of the callable's definition, or None if N/A.
      capacity: Passed through to FireTraceElement; presumably whether the
          callable could accept more args -- TODO confirm against
          FireTraceElement.
      action: The trace action to record; defaults to CALLED_CALLABLE.
    """
    self.elements.append(
        FireTraceElement(
            component=component,
            action=action,
            target=target,
            args=args,
            filename=filename,
            lineno=lineno,
            capacity=capacity,
        )
    )
def get_appliance(self, id_or_uri, fields=''):
    """Gets the particular Image Streamer resource based on its ID or URI.

    Args:
        id_or_uri:
            Can be either the Os Deployment Server ID or the URI.
        fields:
            Specifies which fields should be returned in the result.

    Returns:
        dict: Image Streamer resource.
    """
    resource_id = extract_id_from_uri(id_or_uri)
    uri = '{0}/image-streamer-appliances/{1}'.format(self.URI, resource_id)
    # Only append the query string when a field selection was requested.
    if fields:
        uri = '{0}?fields={1}'.format(uri, fields)
    return self._client.get(uri)
def copy_fhir_type_with_root_element_definition(self, root_element_definition: message.Message) -> 'FhirPathDataType':
    """Copies the type and sets the root_element_definition.

    The instance itself is left untouched; ``dataclasses.replace`` builds a
    new object with every other field carried over.

    Args:
        root_element_definition: Element definition to set for the type.

    Returns:
        A copy of the original type with the root_element_definition set.
    """
    return dataclasses.replace(self, root_element_definition=root_element_definition)
async def find_user(cls, config: Config, user: str) \
-> Tuple[str, str]:
with open(config.users_file, 'r') as users_file:
for line in users_file:
this_user, user_dir, password = line.split(':', 2)
if user == this_user:
return ... | If the given user ID exists, return its expected password and
mailbox path. Override this method to implement custom login logic.
Args:
config: The maildir config object.
user: The expected user ID.
Raises:
InvalidAuth: The user ID was not valid. | juraj-google-style |
def calculate_bv_sum(site, nn_list, scale_factor=1.0):
el1 = Element(site.specie.symbol)
bvsum = 0
for (nn, dist) in nn_list:
el2 = Element(nn.specie.symbol)
if (((el1 in ELECTRONEG) or (el2 in ELECTRONEG)) and (el1 != el2)):
r1 = BV_PARAMS[el1]['r']
r2 = BV_PARAMS[el... | Calculates the BV sum of a site.
Args:
site:
The site
nn_list:
List of nearest neighbors in the format [(nn_site, dist), ...].
scale_factor:
A scale factor to be applied. This is useful for scaling distance,
esp in the case of calculation-relaxed structures which may tend
to under (GGA) or over bind (LDA). | codesearchnet |
def _usage_id_from_node(self, node, parent_id, id_generator=None):
if (id_generator is not None):
warnings.warn('Passing an id_generator directly is deprecated in favor of constructing the Runtime with the id_generator', DeprecationWarning, stacklevel=3)
id_generator = (id_generator or self.id_generator... | Create a new usage id from an XML dom node.
Args:
node (lxml.etree.Element): The DOM node to interpret.
parent_id: The usage ID of the parent block
id_generator (IdGenerator): The :class:`.IdGenerator` to use
for creating ids | codesearchnet |
def start_services(self, service_alises):
for name in service_alises:
if name not in self._service_objects:
raise Error(self._device, 'No service is registered under the name "%s", cannot start.' % name)
service = self._service_objects[name]
if not service.is_alive:
s... | Starts the specified services.
Services will be started in the order specified by the input list.
No-op for services that are already running.
Args:
service_alises: list of strings, the aliases of services to start. | github-repos |
def read(self, input_stream, kmip_version=enums.KMIPVersion.KMIP_1_0):
super(CancelResponsePayload, self).read(input_stream, kmip_version=kmip_version)
local_stream = utils.BytearrayStream(input_stream.read(self.length))
if self.is_tag_next(enums.Tags.ASYNCHRONOUS_CORRELATION_VALUE, local_stream):
s... | Read the data encoding the Cancel response payload and decode it into
its constituent parts.
Args:
input_stream (stream): A data stream containing encoded object
data, supporting a read method; usually a BytearrayStream
object.
kmip_version (KMIPVersion): An enumeration defining the KMIP
version with which the object ... | codesearchnet |
def get_case(family_lines, family_type='ped', vcf_path=None):
family = None
LOG.info("Parsing family information")
family_parser = FamilyParser(family_lines, family_type)
families = list(family_parser.families.keys())
LOG.info("Found families {0}".format(', '.join(families)))
... | Return ped_parser case from a family file
Create a dictionary with case data. If no family file is given create from VCF
Args:
family_lines (iterator): The family lines
family_type (str): The format of the family lines
vcf_path(str): Path to VCF
Returns:
family (Family): A ped_parser family object | juraj-google-style |
def _FormatSizeInUnitsOf1024(self, size):
magnitude_1024 = 0
used_memory_1024 = float(size)
while (used_memory_1024 >= 1024):
used_memory_1024 /= 1024
magnitude_1024 += 1
if (0 < magnitude_1024 <= 7):
return '{0:.1f} {1:s}'.format(used_memory_1024, self._UNITS_1024[magnitude_1024... | Represents a number of bytes in units of 1024.
Args:
size (int): size in bytes.
Returns:
str: human readable string of the size. | codesearchnet |
def resolve_peer_creds(self):
if (not IS_UID_GID_RESOLVABLE):
raise NotImplementedError('UID/GID lookup is unavailable under current platform. It can only be done under UNIX-like OS but not under the Google App Engine')
elif (not self.peercreds_resolve_enabled):
raise RuntimeError('UID/GID looku... | Return the username and group tuple of the peercreds if available.
Raises:
NotImplementedError: in case of unsupported OS
RuntimeError: in case of UID/GID lookup unsupported or disabled | codesearchnet |
def from_string(cls, cl_function, dependencies=(), nmr_constraints=None):
return_type, function_name, parameter_list, body = split_cl_function(cl_function)
return SimpleConstraintFunction(return_type, function_name, parameter_list, body, dependencies=dependencies,
... | Parse the given CL function into a SimpleCLFunction object.
Args:
cl_function (str): the function we wish to turn into an object
dependencies (list or tuple of CLLibrary): The list of CL libraries this function depends on
Returns:
SimpleCLFunction: the CL data type for this parameter declaration | juraj-google-style |
def log_softmax(x, axis=-1):
    """Log-Softmax activation function.

    Each input vector is handled independently.
    The `axis` argument sets which axis of the input the function
    is applied along.

    Args:
        x: Input tensor.
        axis: Integer, axis along which the softmax is applied.

    Returns:
        The result of ``ops.log_softmax`` applied to `x` along `axis`.
    """
    return ops.log_softmax(x, axis=axis)
def get_next_as_optional(self):
    """Returns the next element wrapped in `tf.experimental.Optional`.

    If the iterator has reached the end of the sequence, the returned
    `tf.experimental.Optional` will have no value.

    Raises:
      NotImplementedError: always; concrete iterator subclasses must
          override this method.
    """
    raise NotImplementedError('Iterator.get_next_as_optional()')
def with_subject(self, subject):
    """Create a copy of these credentials with the specified subject.

    Args:
        subject (str): The subject claim.

    Returns:
        google.auth.service_account.Credentials: A new credentials
            instance.
    """
    # Copy the claims dict so the new credentials never share mutable
    # state with this instance.
    claims = self._additional_claims.copy()
    return self.__class__(
        self._signer,
        service_account_email=self._service_account_email,
        scopes=self._scopes,
        token_uri=self._token_uri,
        subject=subject,
        project_id=self._project_id,
        additional_claims=claims,
    )
def set(self, refresh_token):
    """Cache a refresh token, ignoring any failure.

    Args:
        refresh_token (str): Refresh token to cache.
    """
    logger.info('Saving refresh_token to %s', repr(self._filename))
    # Best-effort write: a failure to persist the token is logged, never
    # raised, so callers keep working with the in-memory token.
    try:
        with open(self._filename, 'w') as token_file:
            token_file.write(refresh_token)
    except IOError as e:
        logger.warning('Failed to save refresh_token: %s', e)
def _GetTaskStorageFilePath(self, task):
filename = '{0:s}.plaso'.format(task.identifier)
return os.path.join(self._task_storage_path, filename) | Retrieves the path of a task storage file in the temporary directory.
Args:
task (Task): task.
Returns:
str: path of a task storage file in the temporary directory. | codesearchnet |
def get_acmg(acmg_terms):
prediction = 'uncertain_significance'
pvs = False
ps_terms = []
pm_terms = []
pp_terms = []
ba = False
bs_terms = []
bp_terms = []
for term in acmg_terms:
if term.startswith('PVS'):
pvs = True
... | Use the algorithm described in ACMG paper to get a ACMG calssification
Args:
acmg_terms(set(str)): A collection of prediction terms
Returns:
prediction(int):
0 - Uncertain Significanse
1 - Benign
2 - Likely Benign
3 - Likely Pathogenic
4 - Pathogenic | juraj-google-style |
def _set_value(self, slot_record):
    """Sets the value of this slot based on its corresponding _SlotRecord.

    Does nothing if the slot has not yet been filled.

    Args:
        slot_record: The _SlotRecord containing this Slot's value.
    """
    # Guard clause: only FILLED records carry a value worth copying.
    if slot_record.status != _SlotRecord.FILLED:
        return
    self.filled = True
    self._filler_pipeline_key = _SlotRecord.filler.get_value_for_datastore(
        slot_record)
    self._fill_datetime = slot_record.fill_time
    self._value = slot_record.value
def ProcessFile(filename, vlevel, extra_check_functions=None):
_SetVerboseLevel(vlevel)
_BackupFilters()
if not ProcessConfigOverrides(filename):
_RestoreFilters()
return
lf_lines = []
crlf_lines = []
try:
if filename == '-':
lines = codecs.StreamRe... | Does google-lint on a single file.
Args:
filename: The name of the file to parse.
vlevel: The level of errors to report. Every error of confidence
>= verbose_level will be reported. 0 is a good default.
extra_check_functions: An array of additional check functions that will be
run on each source line. Each functio... | juraj-google-style |
def InTemplateArgumentList(self, clean_lines, linenum, pos):
while linenum < clean_lines.NumLines():
line = clean_lines.elided[linenum]
match = Match(r'^[^{};=\[\]\.<>]*(.)', line[pos:])
if not match:
linenum += 1
pos = 0
continue
token = match.group(1)
... | Check if current position is inside template argument list.
Args:
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
pos: position just after the suspected template argument.
Returns:
True if (linenum, pos) is inside template arguments. | juraj-google-style |
def from_dict(cls, config_dict, return_unused_kwargs=False, **kwargs):
if 'quantization_config' in config_dict:
config_dict = dict(sparsity_config=config_dict.get('sparsity_config'), **config_dict['quantization_config'])
return super().from_dict(config_dict, return_unused_kwargs=return_unused_kwargs, **... | Instantiates a [`CompressedTensorsConfig`] from a Python dictionary of parameters.
Optionally unwraps any args from the nested quantization_config
Args:
config_dict (`Dict[str, Any]`):
Dictionary that will be used to instantiate the configuration object.
return_unused_kwargs (`bool`,*optional*, defaults to `False`):
W... | github-repos |
def update_splits_if_different(self, split_dict):
    """Overwrite the splits if they are different from the current ones.

    * If splits aren't already defined or different (ex: different number
      of shards), then the new split dict is used. This will trigger stats
      computation during download_and_prepare.
    * If splits are already defined and equal, this is a no-op.

    Args:
        split_dict: a ``splits_lib.SplitDict`` with the new splits.
    """
    # NOTE(review): assert is stripped under `python -O`; input validation
    # via an explicit raise would be more robust.
    assert isinstance(split_dict, splits_lib.SplitDict)
    unchanged = bool(self._splits) and splits_lib.check_splits_equals(
        self._splits, split_dict)
    if not unchanged:
        self._set_splits(split_dict)
def compile_dependencies(self, sourcepath, include_self=False):
    """Apply compile on all dependencies.

    Args:
        sourcepath (string): Sass source path to compile to its
            destination using project settings.

    Keyword Arguments:
        include_self (bool): If ``True`` the given sourcepath is added to
            the items to compile, else only its dependencies are compiled.

    Returns:
        Iterator over the truthy results of ``compile_source`` (falsy
        results are filtered out).
    """
    # Copy the parents collection before mutating it: the original code
    # called ``items.add(sourcepath)`` directly on the object returned by
    # the inspector, which could corrupt state cached inside the inspector.
    items = set(self.inspector.parents(sourcepath))
    if include_self:
        items.add(sourcepath)
    return filter(None, [self.compile_source(item) for item in items])
def init_properties(env='dev', app='unnecessary', **_):
aws_env = boto3.session.Session(profile_name=env)
s3client = aws_env.resource('s3')
generated = get_details(app=app, env=env)
archaius = generated.archaius()
archaius_file = ('{path}/application.properties').format(path=archaius['path'])... | Make sure _application.properties_ file exists in S3.
For Applications with Archaius support, there needs to be a file where the
cloud environment variable points to.
Args:
env (str): Deployment environment/account, i.e. dev, stage, prod.
app (str): GitLab Project name.
Returns:
True when application.properties was ... | juraj-google-style |
def parseArgs(args):
if not isinstance(args, (list,tuple)):
raise ValueError('args is not a list or tuple')
dRet = {}
for s in args:
oRes = re.match(u'^--([^=]+)(?:=(.+))?$', s)
if oRes:
mGroup2 = oRes.group(2)
dRet[oRes.group(1)] = (not mGroup2 and True or mGroup2)
else:
... | Parse Arguments
Used to parse the arguments passed to the script
Args:
args (list): A list of strings representing arguments to a script
Returns:
dict: Returns a dictionary with args as keys and the values sent with
them or True for valueless arguments
Raises:
ValueError: If args is not a list or tuple | juraj-google-style |
def redirect_stdout(new_stdout):
    """Redirect the stdout

    Args:
        new_stdout (io.StringIO): New stdout to use instead

    Yields:
        None: control is handed back with ``sys.stdout`` swapped; the
        previous stream is restored afterwards, even if an error occurs.
    """
    saved = sys.stdout
    sys.stdout = new_stdout
    try:
        yield None
    finally:
        # Always restore the original stream, even on exception.
        sys.stdout = saved
def iter_variants_by_names(self, names):
if not self.is_parallel:
yield from super().iter_variants_by_names(names)
else:
for info, dosage in self._bgen.iter_variants_by_names(names):
yield Genotypes(
Variant(info.name,
... | Iterates over the genotypes for variants using a list of names.
Args:
names (list): The list of names for variant extraction. | juraj-google-style |
def __init__(self, output_mediator):
    """Initializes an output module.

    Args:
      output_mediator (OutputMediator): mediates interactions between output
          modules and other components, such as storage and dfvfs.
    """
    super(OutputModule, self).__init__()
    self._output_mediator = output_mediator
def parse_function_params(params):
function_meta = {'args': [], 'kwargs': {}}
params_str = params.strip()
if (params_str == ''):
return function_meta
args_list = params_str.split(',')
for arg in args_list:
arg = arg.strip()
if ('=' in arg):
(key, value) = arg.spli... | parse function params to args and kwargs.
Args:
params (str): function param in string
Returns:
dict: function meta dict
{
"args": [],
"kwargs": {}
}
Examples:
>>> parse_function_params("")
{'args': [], 'kwargs': {}}
>>> parse_function_params("5")
{'args': [5], 'kwargs': {}}
>>> parse_function_params("1, 2")
{'ar... | codesearchnet |
def relative_probability_from_lookup_table(self, jump_lookup_table):
    """Relative probability of accepting this jump from a lookup-table.

    Args:
        jump_lookup_table (LookupTable): the lookup table to be used for
            this jump.

    Returns:
        (Float): relative probability of accepting this jump.
    """
    initial, final = self.initial_site, self.final_site
    # The lookup table is keyed by site labels, then by the
    # nearest-neighbour occupations of each site.
    table = jump_lookup_table.jump_probability
    return table[initial.label][final.label][initial.nn_occupation()][final.nn_occupation()]
def _add_remove_user_template(self, url, template_id, account_id=None, email_address=None):
if ((not email_address) and (not account_id)):
raise HSException('No email address or account_id specified')
data = {}
if (account_id is not None):
data = {'account_id': account_id}
else:
... | Add or Remove user from a Template
We use this function for two tasks because they have the same API call
Args:
template_id (str): The id of the template
account_id (str): ID of the account to add/remove access to/from
email_address (str): The email_address of the account to add/remove access to/from... | codesearchnet |
def _module_info_from_proto(module_info_def, import_scope=None):
graph = tf.get_default_graph()
def prepend_name_scope(name_scope):
return ops.prepend_name_scope(name_scope, import_scope)
def process_leafs(name):
return _path_to_graph_element(prepend_name_scope(name), graph)
connected_... | Deserializes `module_info_def` proto.
Args:
module_info_def: An instance of `module_pb2.SonnetModule`.
import_scope: Optional `string`. Name scope to use.
Returns:
An instance of `ModuleInfo`.
Raises:
base_errors.ModuleInfoError: If the probobuf is of the wrong type or
if some of its fields are missing. | codesearchnet |
def debug(self, status=None, nids=None):
(nrows, ncols) = get_terminal_size()
sched_excfile = os.path.join(self.workdir, '_exceptions')
if os.path.exists(sched_excfile):
with open(sched_excfile, 'r') as fh:
cprint('Found exceptions raised by the scheduler', 'red')
cprint(fh.r... | This method is usually used when the flow didn't completed succesfully
It analyzes the files produced the tasks to facilitate debugging.
Info are printed to stdout.
Args:
status: If not None, only the tasks with this status are selected
nids: optional list of node identifiers used to filter the tasks. | codesearchnet |
def serialize(self, accumulator):
    """Serialize an accumulator for a remote call.

    This function serializes an accumulator to be sent to a remote process.
    Subclasses are expected to override this no-op base implementation.

    Args:
        accumulator: The accumulator to serialize.

    Returns:
        A byte string representing the passed accumulator.
    """
    pass
def copy(self, src, dst, other_system=None):
    """Copy object of the same storage.

    Args:
        src (str): Path or URL.
        dst (str): Path or URL.
        other_system (pycosio._core.io_system.SystemBase subclass): Unused.
    """
    source_kwargs = self.get_client_kwargs(src)
    destination_kwargs = self.get_client_kwargs(dst)
    # Translate client errors into the package's own exceptions.
    with _handle_client_error():
        self.client.copy_object(CopySource=source_kwargs, **destination_kwargs)
def regrep(filename, patterns, reverse=False, terminate_on_match=False, postprocess=str):
compiled = {k: re.compile(v) for (k, v) in patterns.items()}
matches = collections.defaultdict(list)
gen = (reverse_readfile(filename) if reverse else zopen(filename, 'rt'))
for (i, l) in enumerate(gen):
fo... | A powerful regular expression version of grep.
Args:
filename (str): Filename to grep.
patterns (dict): A dict of patterns, e.g.,
{"energy": "energy\(sigma->0\)\s+=\s+([\d\-\.]+)"}.
reverse (bool): Read files in reverse. Defaults to false. Useful for
large files, especially when used with terminate_on_match.
terminate... | codesearchnet |
def __init__(self, pidfile, logger, port = 64042, host = 'localhost'):
super(RemoteControllerDeamon, self).__init__(pidfile, logger)
self.__port = port
self.__host = host
for name in dir(self):
method = getattr(self, name)
if hasattr(method, 'registered_f... | Create a daemon which is controllable via jsonrpc with decorator
Args:
pidfile (str): path to create pid file
logger (logging.Logger): logger for the daemon
port (int):
host (str): | juraj-google-style |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.